[Binary artifact: tar archive of Zuul CI output (owner core:core). Archive members:
  var/home/core/zuul-output/
  var/home/core/zuul-output/logs/
  var/home/core/zuul-output/logs/kubelet.log.gz
The gzip-compressed contents of kubelet.log.gz are binary and not recoverable as text.]
:Y+A_#zU|ً:H4phɐiht5ps0Nwˋ*,UƆ+VLEu^}]@5g 7x87(^:yŬU>y㼀X܌f㹨W5򸨩_f@N/7o)Q\@TɈ%VGZb(X|īi'̻V Koq]HưVDpR)ÈBxd9B17Vva䐤2( #5i8Q.܎ *>dD_ ݔb2 jHr5n{}|ϝvT$D*;4u537\mԽ*^9LI ("kM!R#& C9s޻hՖV''Ğ_tPKZx ᓅ{Ď"8FmtJk%ҎlIK9.|VziT067-„Z.„J"/ІhzQ'TDTDqM0k%YyE9c㘉@aa'7 RNq,z Na=a[0 ^vѶ%>){o~j.b,ߧ)J3o K8'LZ<6USY&1_=_ۯŧ~ ^^ 0i/`̕ؿ[ O)uQԨ(!lJxxdW%zpZ[Օ>j%~~C/qaZ?>(Xl/Z_;77wߑ\?.0csׯHyQN*jdy"忯ΪɫpG3y+ph|8>oGPOf(q _߽quXi:8)R%0Ȓ DAtt)Y+=Wg~֫|5Á~ iBG8NC릫涕Ն?U<elh6-͟W_ʴ.ޫϼE4՜-$lOEKni7< }&iJΆ:jy/ CKV?WC^z}~Bä80)ՉA0}wzc\zK[k9WS*OͿ} O\?J*vpRRD:J2$ߒs1j\b5빖?a4JG1;XD5`0[(FVg)Wүo{uY[FȗZrV:AfY ayi#K-:RjKf(LQ &zGgrT7m>5țQޥ#T<at<<=RΣ= _ Y@x_FzS_RMT5`$@iWŸNYx N^b\b!u*޶b%VMrr6&eڼ(>"viD {*opt΃eݸ;wۚ܎6\>Yp5[Ķa"q_A9RP-RyzyJb\"\ןl;ΊMRe8M[7޴gc [ڢ|9U޹*)5LfE8LiqZP{ υw*i٣[6iNfͻ*rϑy-֣@eLX a**g(%\ŀpj&([*< epPťԹ-{1 ׳Vq&2>,R1>|ieFhCH(fAE+An`VN f N =6"pu˰ ym%.gJ{8_yD^BlZCXJ^ZBSkCZ5ãdrA]]{M뺐Ki6kXpP?mc:gkg,nizg8 k0DKZzϽ#BMTDzo%[Z`$MԵmYc gàtGhϝgFhݥ=з*L'mTф h윱ёOJ zSNcG3@QlzWZaG>83 AL7XuƮ" :e6rk(|-s"'Ⱥh5`;: nhgiF꾣.+ +AnfU.)-ݰQ$P6P!W t{z] #(lj3EQL1,i쀟P`ktufWSy2*VN Ȥʆ4j}đӒƢXŨVߕziezmn2ɬFwE$ؕ_t#f@57kiP;1h a, zIc-AɂA  ?( ~DUuAS[K1tX,_4&䎊\0 jM@g"52X7Tr6ЙlWd7M-V:S -i7k,X B!Т%PAk@@!NcĮ|A+AETE]%E{m>ƜQ,2X Bt e͒ftYk|B HۃB6z`*{M2 zuX3fcƘQ*TehtD5!!˨wG켙a;g;nK ?:V/x⳨mFLZ$ >3x(ur u@r (}vG6&*fF6f 0r 3i<ѣy0o _q=V24D iyME>KÝ.Kcl3*A 9}d@֨Vv<o 1t|t`uaQ$ 8Ձ5;::`V/$ ƚ`|?»y`+y'kWمX#WNb Xk3p`D4hcc.u%{6^,s@ ѨTɃwK|TP}8x  L)ڽX1z. mM!g0Z.f;6@0]:x Ġ-&(ft aہar<  g"“2x9vE1ELˤUUZ&B[v@¦lҰEu |>f%0 C渵h}76`o<󼳛o`t8Ymi/N6뾫,F͢@77kT3 =k(ð;Z,Fw:ܚ6fѲFhvfc-`i/G%o(XA3ƂDhRVt*TCLM]ڡ )0ց- ;ƾ:TvV&5sJmhOY"G䍡"U GʅW(aZT1T:9(&#^uKj*=66Tcs6[[EВQ<Ҿ-BQ`1 `  4lm VZE"P/+HD $J QH@%(D $J QH@%(D $J QH@%(D $J QH@%(D $J QH@%(D $J QH@t@ꇥܕ͍([y6Q}BJ aE $J QH@%(D $J QH@%(D $J QH@%(D $J QH@%(D $J QH@%(D $J QH@t@ރ,=$%m%Ы[FZ̳G߯y)nP{?廋@+p Z`KlKl%>KC)T`0ƃ)Wl.Rڠ>rVt,)WPVW<g~uprPشG'gzupji^U%թ7?kǏPo:TZ#gG 6ξҧ[o/e7'^H<Ӣ9 ̅BOQUbnU=[A>[Տpq$72ti(0{n5ӭd4jċ t~ԛ݀ *sA;;-2Vz[^vf:v]ӽ96]$6v$_f\-g1Nn 6xnEʗU,)Um/~Y=+|SN:=}K??zߝ_I.7m)mEKek9woK[H4`^mbL[L={:gxKHt:-Ms(u PwUyѨ+ ڎZΡItcNI;S|9lSՋ'oIP? KGZ;}nc/ל~z^?/+Y_0oy^FlI]uzU;9?HΜԿǧ ˭3+4)]ΜNY_ $\EUEGUEDj}lt ©}{rCo3? 6B}K6I$?%9brվG5o[EIꁛy?wVZG  c_MIFb%5l=H9^1>iLhb !4nD$N uauvZlb%;jVkcXݵOlFo?o'ȼ sӇd}r44x~>O!ğ|sO5~uټ(eNwG15ƜILJ~ҡg۵ۿ}9\\\'ol滌lw:2v9?Gϖ0gͯOcwt=v)F~o`Ga*|[;_`hwIfӝZ¬۰\DS:>~|yG+ҨE޻D/O/P訟owGcR .lgkm_+ &EJ.Y~cͼ|sEŋrp|)o!T:%DXdښFe (!ǂCyhc,b>c9t`$[@9rL荓JdY9K(Qc.T \[T  {'N-l-Bg[ox[VrRnY1PZ S7|ը~b+sЯF-xi@/zSz)b0KK`\R4%nSb*αDʪFъ9G V#k+!囎0&_(TuX?޿~Df[ɄbAEc[g!4{n>|IIkG״,OJpR{xCf ۽ L*)ݬJﯞs8PmakRɆJ6]o=}‡+GԼQrԷ_{w4^}@x_y]Q-v^-iyN|/ yE|s[v\FJD>Up٣D7ʠn&%I A.s]\v = "K 1e,0#iFN͇:[vw@v%Z2LJ&ner1'-`"ڡs?_%zԁpqV.iZ/9ue\T.ǧ`a*KEhCFFZ@DP9nj.zǩxh+gW C L']b -O5Jߨt| WH2D/T<)HPH$;\?e[$*ŽiƓd Q5`ȲGPEf-A<cyD2<9): LsĦ~>!b/sS )Z%ܾA e+lO,[' #G#l10Re4L*ɥdI$j\Tɢ(H󡍸É40ԇ5hf鎁.D2Ē:#^9-2'ً5r >ʓՐcKn34ZD<#&t~8S4YJKJAx5ˆ^bԇQm&+پ4,r%T1\NDZ +ѾʶG=2-/E@>FҴ(!;7yP `O ?w!O\|B,F˫ʹѺvSvo]'Y`_ji'i@:yd*'ƁgU^=PbT1;^nBpa'ޤ6_۶_om>J3qv3CMfe4-\2?ӌݙ}3452vNtycؾ-nV?M.}qo҅u}'ʛmзs;n`c]ł9L녘`1-$YVwH7NrT7\V`]:'U86ܨ|tM({D b];֫'\un5p-H%iR:"hkcx"'<*L4sZeNyƍ;+u*z~Ujt3 8afܻz5&;bKQ=~E`I\}ŕRk={*`l$,R<4ƚt#.SJ}B6JfEz=Q@qq+.y_vA38.~~}krx1||GM+&3̂9)JUI ϴ3B,k`-9fY"J_ 00 :.4uAFo6hfKY*S͈ʦ̐cV)'s;Qyf+;c-*/sSO;gY([ɉxGpҩ, KNI-G-MsX  =p'2c޳JXGrř2\!Zd(@ZЀ&dc䩂0i /}jծG| -o{5Y_;!vv(eK`̾kop៿v 9̻_M;gOa;su~ׯZm ^k4;;lʷO1yaR g`k4}.S\l뮺arFF띧Tw}+l9p R[<5꾩?6;>V5KcĊܩgf$2NԼyX|=V1/n4674-!ݞY: )cQvn>\;.ytOlg6 R跕QYE$$1_}!ZBJm7 LScGkRYȂL(T9qhT񂻆&Ap\"{"YgD; =іDV{!M W(&V+$SJ"s|&kmHldq-! 
=qz1(>]obOBx9ҚCV3i "9 jGVT׷H Ru.-ɿ`d^n >]d#Ɏ+v^^ڲ@fEXU.*> ?GBה\0X9^b + 2@JF/tJd&g}&Qj\ }1‰t 0\r.AXsD_{O܊4T$R)J0y0jxqT6* ^ڠLiL )&YC]ɨ@{YodFM#ƥRa1"&(e=F`a,RSGTj+{0sC:&=\zV%fJIVZ'6w8dmvt($Mn^BZy8f葤#-&-l˥O˫?m3 THp --%pH_<;Jd葅 V"aQ>/} 9v,\F%AJS*,sV;9E-^Mnyv˸&r2͈|< @@q"@%%4C64+S*JICN6.ӻ+4l*aŝ|` .h9K۬<~t[o΍ gDC3#ބA_47 =w ^(eUYTYyFt1k+QL#:FAD4B)N [O#뀐v'Q&(a+vOLnk37Z'Fe+hv4MmR5?8dGh-@ƒ l ,T@JeYa-5JNcF1ZleF͝QFGR5@T)`e n Tzt< qݓ:xS:EY+fH%aR4i?XUt@eJ37NIjbjrvjF.#,E7f/߀.a?p<~ډ^Pj<^@dQ,RիZJtV LUVIVN~l*xP:Ãt/`*QhtQ[ݶv]U/tY~xûG/oȘΫSU_UEyy[U`2d&ot|3G5ūoU_BEJra%~yZ67Hpp|yX4ǿkV2*SaW@U̮ b[n=(K]GgWWeWV!8~Pu\݃]̮vzL +XE;JRTUcgW J)3ayE?J+d 4TUcgW JF2ٕ F0K%!^>)sѭFgO#\~L+*f۫ꔢ C{;8&S򾦮hw϶3 i04JCkUb˞X}^\ok)jA߼>;Z*fdƛR;'Kb۝mRL ɢswk^eqCee)^]UWr̎w8M6 )~W?ætaz7ĸĢ(`֩RiĴ}Ǽ( ^杢yHrV/\wT uw!R|ds:/׎Mt1_pvbg_"܇?O}(A9ds/}q\#slGIns\ Wrli#ƹ@p+ @+ @+<FsDr\ Wa`@+ @+drW͒o&ht_3n_=ybi#-iP){H-ֲvam#JAZҲݳ5?y8Wx6Vptt  &C\pnZL9VFwG"\J&y$,Vi,jRʃp-ḳ5e5pTņ2AmaUdv! ݧWNjomYCg6ZtQa`뤟bJ(bZ $`!*h=en98a9\Ӝ,7AyHq)9IEd03,*8c  $cPBd(eb] ֭ПiHF;K"( K}SF$SkkAc val\eOY53h#$ c%;ǐ >q@ "4(ESD9)UGe& iU(1T{ >zK ,'n# -1$&ܦΗ%)w*?<4N?N:0Tw)vݾgzV&oFQS ָ/ӯRV7mşٞN=ODko2>2 ϶tf?LםVNfd ~hNj0Y/ׯ?3M?K.FD>6kT4cf׽b/w::irEd%@zU! z~ih\rB+ O}($ir_MneIjٰ?/ S=Ҫ$ђ[תO̓Wݲl@$>},Nٸ=hZ=yl]_7p|td"J3^Yk/R,Hy9>X)^$,]靺Z} 7Q#Bs/jKr[Ou=A?3jĀL&֣ΤU$hUN:F7p&w:%aC%en:ee4mGvjk6:<Äeͬ6!XDŐZ[+B@( *О(GǬpYQϷc2Lj&h,ZVnFa|y'w_Gk8`@[* hyI`>܂+0EI%3ȖXR"`$ ,9$e{z{z{z{A( IEx$EٟG8ASQRQĐu * QPM<0nʄ#"(A[̠.k%\"ێm m.tLd7bNf^lM:ĚG#XOd}p1 N,<èBx"ebXч&*:gZDiCJ \IÎO w W2Ҍ7AV@h5_(8? 7k$0I gxxSpG/N댻sw.s$ o^[5kJۮz#rJqɤ8O*TR3&Ǟ5IId)fm+VCѣXR.E"8R'"eEʛl5̉lr"cMdc>(+,B2a#5\Q "r:8MHY)$T[1.83 kһ&Nx5]|ə0 кΧxWGʔεgv8BZ6_W%`LίZ8f("< ]2yiA*e``wjG(((( Ki$-!j,RFbo# fskXrm; }L{e}_}t5 5Z2O H8RĠ&UW'"h0nPCR( jxT bFRT]` T)Jkdz[l&iXG=/} 'H C|{@Ҵ]25p6J\Cb[Ï]9"m#3G\1 QΚ`):28S ё[np%,g($^XobA% ٻ嶑$+l!~xb;bwfc/^G]%e&)َA x)PD[©SYY΁'MFbuMU݌&T[5s߉mW{- OI=MJ*A~~Գk%iGH{,drS_j2{ Tʂ {b}dˆ)*zU*t<[x EJޱw}\m'X`T9F$㑅H>F0 *7덶IC c+I|0[MJYp^lbٮ9ʳ:MȱZʄ2a^!X0A* <8NxQ 9^$K}wz2'SœąaWTmʅ)f^6x4DqQ9,:gHs< E,%>DQ9s s:mvv: H!2\ i"pNtb17FcE !Ieˤx-yA)ıCZb c\V nݏ|qkJL338(B#`B@6Jepx*1J@3g- PPL[r kJ4 5DP#OQg&\s$HokX(_]! jbJ#68R$#Z{1[,6Vٽs/6/c*%/V'=...-a2Y}& )<,o@>Ymừʦ͇`Cr}ēb_ } (ݸ49+ RŶw O*µ8 ;ޅU^KTtEǸίgq݀c`J<rHx:KE ^T:op5ӥ.Z&ĵ r B($b1@1gz4‚SV3S{fVZכPפ}E l>ϗ>oyW[մiL=qSo0͹_ ߤ#ڀE's[*Ss% ΨEBEwRpReq;6G(IwEr 6 3\E%#)DIYO%EG;yb8{,680fwvåEtvkt,Q΍ϖvnGX]UYc܃ǫW0+%AHptGcGa$˫'{b{tHeӒ0 D B!pQ~$f,B3"اLKI X%ÂwDRA󆉡185[oE ߹\37M,ywS:5!ԕ]z3T|N 6goYr VvUTHo@4,a5Vp4si 'fg8z6RO{rAv鴤)_. #r]c8+W䂩XIUzz8^Vf ^ VBf|~d֩)U檒<c/]xx53:~v4~o8:'f?QIߋ,HRnO0'!Op4]aP}2\Ŭ55 iE/'+:f㻒6r$ @X(ܯONU'X?Q%>f~&Śg` !D1i24y#>W קr?=UqYKȇ<` '쏾1҇M \Tw\!2'.|c:&{AU}ٽؒNNX`,b@MiT40_Gy w|iSOuޘ46Gm$ }ySF(˞=~ǾZNW eyOWCWi@zm ˮPKkCX3((dQdnr[ 8\XLUo#g*  OKkQJ' Z&TcMc]S39j$#GՄ vM[ Y #H(򹖜N)bxЄox>rp`9OǣX::y*֗^ܽ2wöZbuە\]=YK!Oz>c0l\j ܙ ˕AWhGn9{wxS v"Xy!ۚLn˚s5J=Lq_:5Ş?:ݵHnRѽtmBYSـӬֳ4 I%GJ-irD@4ž ʶgk Z6AOj}o†[{&v8k]yҔvobHW7ۥsB_byi4Е:њFtF#,ת 5Te)v[0? KqخB\Bj=T hb8UK!t@2BY"jΦ7!ɆAEF'G8JP}׏6P b#)07F tted f%5q՜/:0P}Pwh@^0kːj]ry:x7if9b]Wq!8GL~PJO<|}LDCnq`2xH~b~CO\2.5^U{VbՂ&H$>\,xW-O 42)Vta+lTL{61oF0 *̬ #:&^c/PXArYݷO%`0jfO~ \Fxm6|;oMs(Y*zʕz^sfY3r?,&֦3&YJm?Gek>Q\_e*7Fzfӑ +N}tf"ca@wD@""TtɓuǓKpeg<9JɷKBٶ\zOE<9X`tI'vZNW e·"DUܡ8yEW R$NW eeϳ++BqDc]%3UBP*}HWRIλd]`!JpCW m+@) JiLT+,1 ]\;CWJE P>~tRBCmzצ:\#96}.ʑ6]` >+צ'J1(Imz}cեmFLꌩjS RBn%o=9_[p^:u-?Q~?et%z:t }XUxW *DNW %=]#]I+q@5-Ehԙdz*u" $MetO,pwᮬ lP*կg~Ṟ->Z2,mz!T7oP]`ITgh:eqsZ!N =M!MsLR+_ *]%tZNW =]!] +,qw .rvJ(V*ta&XUq! 
]Z0]NW %ϑBt\ݙ% >J{:CEU.yg*+tև%{:]?{W6c~Fݼ"7m0i#nr',Eɇ%:K,~*Iyd+DR׮jܾZp :1X-=PѕAWSS[jX#”skBK5m;]J{D"\ &3tRJhi;]%Rtut%x+xQpA;sVΜ%\TKpΜhSeUVQ'c=[bpkZkEȐ7ŕ\ASTK:d`Hg,tƒKhy-m1%M,9+ ]%B\QPz@L`;DW,zp ]% JkWHWi]`.yg JBBW"]Vm+B +)b]]`Pg*+tJvJ(5Jq]).]\PW JO@{"J ]%;+h.fLheR랮.`4%?K;>` ZKځjIT0.j@7c !%dl?Kg(HSWHP˲`չWւK;B,Z(%kYT^U;PU,Qg*ը+thmtutE$-8KGEԃ{j%׮J{HbT,=uE=^ܯkYz $ké(9 _ B"T\YWؒ䧪 "Q\6"jCBIU6\t :CW nw@Kh;]%r."Lu)9ŝwƇ;eJ{@ ҥXwGJt3tвOy%׮.vwUܡ)WӮ1P3IWЛD bWUB[jFOWCWZ ܂} j&;U{mVm1PЕ>JkWskWjrzhJ2WAWS*a!JUw b&BW $mmGtmJ2ZhkqN *篷8zBՇ#Cٟ`mjRF9Vj/ dǏ8 I,;ˮX4 -mhJ{-)׾~|@Ȑ!WDSL$x炿͐Rt0aݡWtBUiPR"z@昐qg RBW #sҕُmƴ3tRJhy뵫Rtut%Hv$֌w\Ѯ>;@tﳻDRH:DWXJpYgVUBx^$]~K+LϾx-Lt&$>^5O+@)BW]!%>&`yfW{Mg>Zuk$21:ʴ=])J% ]%ݡ+@1k;]%tutE(Wu>5y_MBJ%{DLVϲ`_tYj$El㬨=JrhˣzA|..WgnzcVRj2۷m(ܤŕ/>̓@c'Hg )+} (*}YiE7DfS'[ܸ!Ő_M?~݇սDݫ;_ܼ\v,4x%!"' *%X,i2?^4ڻ_}]d]`uVٸ(r+lNZDm+8::Jq qq"j1rnW G@/so5}@0y|x=uwGv2_C:{KlB Oc*5(K)“VX/5^#6TJ{f8I(Rߒ&"lzpbq)/FZO6wO&0\/, ^b.t/} moV<]`H/_RnvGJL٧[hfͲAgE{ ks^A 1%1HE $ !8`vc!1}bxyEBPGx@/Ζ"# }\JNcRzd 3âҌX,1#APzd쁭W?>LBFxw֍z3hgqXeytӈDt am-}LcRK#-Þ^"[ Mt$# $ c%;ǐ `>qP 6X'JQNJQaw;Rhb~ rHW>ࣷ1AfA<8e,Ɛ<RWX<_B!h2ow#V¯Joޅ;wwdd˃ć/Pq@_,a|~O;k܇S)jjJvu.6O.b)4Z w`ovn00{q4̙} w6 w^%@NӋu mf,։jM /<ȃuZ`hE--npn(wKEb 7gƅx?Ζ}wO,fxar?7s?4Ib.).Rkg?0 f“) , ,E?)H˪fuJuݘt`.6BHJi{=/0N~.(`eA,}|KW%YKl"bwKEBpͮCx/qE]["[JCY셀VxUP#V-Q|ۭ͙"Ab`Lg7OÚC 4=$Npz6KXRo'aXa䢥UAY\%>L>~4k>\`i*hRK/>IVhJ&Ǝ/'rv!h,Ud?>FFEUdo}K,g?:{EU{)/U1VvHk{&c ?ŶP5/b0a4B3MHc)\Q1$֊ AI g" 屷ؾc4)5=_XbfzJ5B]]˸LUo=,Ng4DIX {FgR[C^ *Q`h052Ôŷ6o]Gc2@G<ۻqXgvE>ew7&Ly;)HTJ9jzߓ~z3jcZp q=B{8X83']S7AQe(NF>i롳*)e+If>A`1Wᡖ^CIL X(>T7K>sKhr#DxJzUSI'A8-l/=c"O"Ra`9$xO"šJM3 4m|V6}SnBj>xj)Kzt7OHn~@0EF3fX! PbC!Td30/Tr:"hoБq9Gl;6j_uʋ@m EQP- $EG8ARQRQĐu7 XQ0<^, ȢH3j9lqrH D4V PNNSvio$nGl٥;Y}QDsv(/|S6(y~9c ƹe#o3H4f>DeCV;,d(ޢ|,t2[富}cـz v1Ly2MեT*)2W/{W/[^qފ)sN!Rb(`2z7pytj&i x$Zu2LwP&ye4hhVHKDecp +XN泇"tGd27{8<R|jsݼD+,=Y䙔0{ (||ӻy'*];^h}EOvgfeST~0Ťힴ.ʟRgeCuNXe?kRf2[Hޯr^'>~DΕa6oywt^;uGdp?R^ˤFodi}>_>^zLҠ>mn|i2q1@cosr1Cq$Q 3 }&HC ~&;3ULv =d)ĺ࠷FK}#؃i*G` j`k96x%W7ٛ6wPv,yyZfulj}wt'-B lJ'xTRLzSNq飶ěUȴ8uF 189]1.lL2NB0 7fsI3y:|2Af([*HZ$$x#eQH('c#!`auYƦFՐ= ւh ПK=Q"#a:0- {P>= )8N'bbFmJ:ڤg^ ae !D^ nZxbdB@ ;"F06dU3A3 hG9AQNnXG is ~1f#1S6̈gĞI<fL &qWwlK`Яo<ǒ= lɶdY,~%*~T2D%LdBR m?Q1》ŮakcW<-a-ʻ{M7aԏ*7./=nv=C)HF7Hj\Gyܕj. ǧ =;޾ms^DP\̘crHUnwȓ3"QPExܮMGX~ o;-& ȝ h4siMjR3V( ۘ*:'hR"f=ENFXfX)RpT68ۘǏ#IϧsgžOF26&B;0 aZay8jNxj^! 7RC⦢V͢.S{X3In9 ֹ<'١<'Rl}SF: 3,eWH>F+$A !Ryɘ \dN{^~`FB6 EkBrL&!ˠ<R^~kl[nx]Ljc(#mAp |<L)gkb^4cM,nۂk z"l1'Lvt=Ec2(f`vB5m^ug͠a;@ہȹ=>D >TU>T݇JCw}j{ɘVp )|4nm<^UXvΪʺ+άu9- n(BΚt]6]K>hx128'^omTDlT҃Py㟭X_Џ o3?&W^R|O eip-(J֤ dNj Sī"&Hۈ1O -%%9\pƽU)5i;k1ߺl1|^|zܶej ?PP=ɘK'-Ggc7Z`t CE]i-rt~ 5;g慄FVV50}g`&qVpcjA% Q Dyy"7y6|,zQR4 8?k5Li>^mf_Ύ-r=|yо$}^|߾^u$(EF#eF1p)4#aך3fC< ^0 z$smVs[@)JeQIK9&&h;[И1NEE[Qk%9()`΃&&);e`h͜-}]B!oZzJL>nwrRNf`tl_W*7<ɫwx# fL2(ѳ hl5Xb뼒Xy\N=kg@7GySl-+=z6)!<".ȼ́;tQ< O.yr<#G>R ځNPµ,)-UJ@A>^jL^ ko;NuZmGġz)j;;OH #[Kmxq标$|I7٣io4Oܴd;0x9RRWIC3KQFSM5+_0-׋Oӟgnk&f?ͫ:Y/ɸCbÄknFk#{0#m1U5LQI1m\+'XYmP2ivo 66ȉr wE#rQK.``ѵ:Oۈ^gtL.,clo*<~R67oeo1St{6 3,jΥ>;6>qcw pqyvݠusMj- AQOO':ݞ9osƲ,L;'0CϞ;n,ň6v1]]wi;X9g~@0IE32ebV0<0A2E .%CnK+ӎZzDcwivuѕ>͍]ȲȲ݋ ;U=nz/P*eEB#[ϊl:q4gWp7I9HHw掬ym:j솄=%D'D|$Zaֳe^upS91es]& X%n+q`3H0ɪ ؈e)+9KQ2m͜m5.w)׋U[M7(s_X ;LZo<֐U3/Lbz *5굪4s7NFR:u"EBde9Ag3Y8}Y}:Mdɾ^쎶OdL*jTNp@($VC`DxI 1vjM]yJO.VAq}Z{ܒQJp=0%ru\b` bTƩAZj֜u5twg~GV]o*!v=P슒iQ:2@ ]1 ]e׻+н.tRJZ4ЅrK?hajvyq[nuPB< ` w>i2Ki/&gD Z mPlro =ESܚtjnnY\KΔLV>WyLZ ԲzgЋ>j4>[n q Pj.#@y:gԈ k!l[{86g =@kK2B9#[p~Jpmۓ$v-3Ď,ƎVw˿m$(d2L9JNT^pW)Zt4JV%d;Hk"'gbvQǬ %̥9Y*iT=${ L,)jQx} w‰6/'Z38E4DrQTh]rth|V`72)=-POm[|Ms5uаF| 5<ରؕ(~;7oF㛯.x&K_'*xZpd. 
*4ePP!sY*ct ΞRYS7:ͺ qAAe䥏"CNd&lQ )g̕2YV6&}ryZrWӦ6I_hFDXKeh&Ax:-tLBz0<2x5FxgUZ. t&Yq6>MN木pTCܚ9O{< d2kl}[zٵ&# qY(DB( V=,!sMf YkzfB2Kaf.UGpE[i{WZrpXNJs#*"s }b<\V!\Yd}"6b}+V(:\+- pf zr{g,̡Y"xy;W>\=Jݱ( vzPFtӳ FzW\WZ-WJzp5X=+9 \s \keXٵ^ P;ᅳqԍJV/Sj?Ѝ9j;F >S^t]\'iVDË.Ƣz :Ņ oU]s@iE<>޿0 ɢ7c-kt/闳 IX8F#jzLتJ+5 ~yinGVxfV !|Ϊʺr Qhdad_0;(˜HUf+/%w^'#;\h. >V ۭ8ǣ_s~gi眎WGLKq_$_FHxN*P#fsAʂ׭Q>u6\ؗHX+\H=FrVjds \m: WJ3{+i#*K \suoɋW[\RU1G쪘l_Xk:XXi{+mT}R!JU1WB_XMኬL pX&OpU 67pU̵/pE*$XɇW +vE g1{îXW|ˡ$d98d0u2.I0/;FFIj6P׭ߺ”zΫڍzY4ٷ'GǣyEt2ݑXj!J :IսܔL[e7·E?{F\ aH H|XIK|ϵH(9ER2v*$<ުStCw͝ߟ!_5/n:JEL?KzC[}#@NVzݬ>ȋW/>dx/R;@Y0J߶wo0|\\"Xn7 &oCn/Owߕzvo$c_}擔>M}npvruKܫhVGJm?ۏ 3Um͇!/@$P=1O<<!<-j'O_fg1o>a?o1C=oYW%qvQP%egM rtV򍢌"Ԉ7dJ.ݪJИB*7fq!Lm T>Ov:NMvbNOBC(B%I:,0e*:1g„IZH~N;"PN7-b1&`-h:h1Ѧd鱁ϳxI5Kc8W[1և*P3u@()%5H 9вb>;$3q5sƖjh\zL-x~ Lg}w欷!6.M݃6`e,h2 ѡL6(dC|c,cU6F 21{WߨGJ<D=@#.DO$},!]rզ:uY(/`TJw!IBާ$YUuP i(%5Væ tF:r|XvIkNֹ[ʱ'Wsqu~1 mƂڄҙܜ`/DmTW!DHT8k]6Вj4 jkT 99$]^FJP0'\qih}yM.^O`|ޞ:8﷜3Ɏ @͂")nn8c`3 =k(] Ϊ2UǨ]Ǵj\kIQ3QFhћAi7LKo=H#tc8ݐh!/Q{úSi*tyFܡv֤D;TP=`ChuACJX@25Ti3 A)[vƾ:B?;?uŠxMR'w d#GPg=CE7,F/)>Ca E=FHKCH4Gb<RuN` LB)1QFXǤz=4 hc[M+5>cZ Rת%_6ϙ3T;:\Aj},ޣ]ےYѼǠhd?jo9hŬb;ڡb*fM֔70hlCkV3^F\ RčrXzGIz`lCi#8 8%_RZ7$ЭME<` \t*M .Fgr\Um`XKEu,*f-$ۯ+6BGR!6OSZ,;wQĵ-* g'>b!hE%蕲w$ KBߝ▋Rݵt%{A5aA3p^]9}p7%T:ֶC0ٻ+[5_]~w V9Y/Ү쌯S/o.. CX~9\],Å޼}j=l]zMxO$WϿ /o~isvu>e~r*tskƇ0oV?Sq;j{ .U\N>!_@E9Щ:''PPkBgp>)NBIRQ@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $NJ,BEMJ/ 3FyMq='+TN@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $Ng, Ār R@6cw18"y&8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N q;^Q''&:z'V@ 8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qH@'8 $N qٻ߶-bK{"mw t/p -D\=$ II-ٖHbM*rH~3sߙǙn' ti=0GKwgZC}q} Ou=7glIڸ!3ٸڳqIpGq PRm\z t'-dUg)1shQnJF:s͕b+TkUI[U4&UBɺm/\iͰ-2WHsְ+Zy e ~G=)RA3W[VcW:PҝPjtdmaXgvz@E 냳mbp[%Ht%+@]?)*71υ ^lr?g?*gs =o{,'fCUny+_ӛ-{5liW*Ezɗ%2?&𫚂7n5\w*/tPz~>?Zf1cƛL;'3bѴ]&\P[gB &/RBZNV/}%ܭ,^>?*;dsڇOhZКCaQòySE]͐bL`Xkt)tBi@ Йh9&H+L`_2W nͨL鱛\@s%䜷\%\\D[UBK𱛫\@s%9"M`EkU+ZcZuJYf:s͕i!nQ0aF!s8EJ()i4OIZd0GjOpIkƮZv3 ڿsşqE]mXzvpՁplۡ<+❹ڵvp:,sh5&nc^)AY!sְc7W zJ,x{|ۂ|4r+ S6tJ:_e0n!ҿܫ6dn[lE^r" $cLCdN)bxѓ/,: .45qϒXQKKI7ؕZIQF)3ZdSM0&%&>ŲpWjA'˾JȢe[J26Et~El9aTQȞ}yd4 q_JEǸ.'qE)Z#C-ҡY():3˭WRW3beP/%#CtjߏRAPC}?[[LZ`Ukğ 3.|9'D6 `M3\%J( 4W\ d`ўWVd$[;>.R L.]$%,Cn'g7f| ^ٲ.gqoqNsDQ0(&4FQ3% e.JBl3| ]΅HƢŒ'A%wF׽^د47ƱZѳ< oKZ )lx̵g1 b ϩ"Xz帱D,0rʽgϑAc2Jad ALmW{Iu> ;pE9e- Wʣ f__VڌPYqi% 8::znzƄm1<(IqU@a'-~qLSfzcs7/o ?g Ѯ,J%_|vTxЅ?_o9 _Txᦛ/U%6QNb`jg.nKR] r/Ka*gc8bφõ2ԦWh6b4;{!M\ O [}s<hח}g)OΠJ ] ⪜տU BpxյI*[;'m~k@cʺ6Uܘߌٺ] !"\ Mo;9[?-M]0kp= o^&WZlx6o7H&QpmS ^ҤQyIX?FOJw; |Gq/ߧS* u4M +˜R;i\tN66Je!٠LfL )&z_ӥ]?t)<XGm-ҙLL2e&Zq)TX JY7XX(ά ukXI^eJq Z-5Cp t[,ޡ%M*/2qk2(X(6C~z*ᩖe3G7ONvW?KUlK=JbWgHcǎĎ=MN9[{56@B䌂}@d u(hãࠈD`KE<ǎEU2R,RZV ha K8 yӾ\.DpCLD@B,,xGI4 ]k5qr6q<=f=YL|ua_jߜfB>}MqU^IdSTa 8K8fqYVqϔNyQފV ;4Uml׋n*]T Zv>3eYIgn"H1q4@j,yZ'ZeTJ(Xilc=sT{l(ި;y-Xhm 4wFm6*GT KIPj@Ca n g}z<q^kiZšbN1/>UZYv3=ĝ%\˵?|-dI mc\!(".%; ^#yb෰G+Qg b/!o"O " O|#nYY)ZtQa`bJ(bZ $H!B(h=f>s5Y &@5xTxG$踔FǤ"X03,*8b`AySwGn[}, "ܐW)kMY$L#D4YbAY^t4"BX[ i,PiesՉA?e}_IC~1#T+9TU8*Xq(' ^ਰaEڴAchG%ifidS AJ˦W6*mTz;jkcN}I 9*.7$ c6\p4}Z}zdOF9zNSh  nϗ}׫Njp:lP, c{4D>VmT$NU+U_zOuK\J 6*D(x󻋱q!Ά\WO}UAlGyuP S*RgKNaqҊDɒ@ŒcI,_EɿͦՅWufsnuEeA˔|pR,U\nϦW+F2 5KSzI>/KFeND9\&AkjsYJ@k_ݫjc_2II6H.0~&_']3wb IitJ/ǪQkY0rqMb(^C6c j)!1K{8L.T̆ޜm"g}f(bjL VoU7Q1 Ԙ .2*"{ X `r lg?:g ߔֿ!'hԣGS7l.">"6j I#4ڄ4`<"*$ZB!H LD<yɀB{ aBbva%Vkc&8Im {#B 8kFiCnIή)l;јLz'yg%.20mw|Cawi_bzZBR\O$3C00 |ȈR4cK2XeA!t_9Bsн9Z ~IыD$έvPGECAh,=&G EnJܤ#"(D8A[fm\$JH!Mh ]+tLBn*=XlyH4@F ӱ53}v)V|w]vHM!)i|rVtHChWk:XIP(ODn2m2OYMGud]cM`>(722 Ʉ5&2cpE 
tp2BVv,:[IH;/t+"}6nc2g}QQLUrU͆p'cz(hc24^FSTtGtJ/8HoEE9c 8:SfNOn8BA$m^LGl*Caj320{-#`E ?{WFr 1OW;2cv`+yvӢHnI^odxQ$%`4LFeE|EVF:8{B-U"|"[B &~}XSlyZj| W̞o&\eawF4sfl;nvLjK|[EMXGTS{82uG1?2k??O9t5fGm_zBYȪݶf^=y #C.'>kG'_y;&n!=N|G zKJ5ottK禳z̏_[j` tmxno~"\GmXvTo ķ9|WerM1h8r6+m8c1x@%BTdVY*$' )zX-\$pg#9H6B$ H '$&LF%k1< 8Ma -]Am֣v`eGn#fIe4k2*0@pC*iL))*!iIz @/5!Xd+$12#օ$TG;@g7NmƮ b18yaD="+:KB 6;QGsS$R$tZ@"є'CUSP8ZDI39_T1q~V;2rKgŴP\qQ6NY#EDư0:B8b9O>1OcbWvP||+}_U >DяQJ+qyo>Я㹿}$"UGV{TL(D)ʸǭhozp;ILcP0ŝ ;b%hNf9u·h!Y &(Xgd?RʃjE>wmK6F9wbH AA=ܬpM3M9Eg~7]^nOU{اyza@x1 xX:OjUW߂a%NSL"m֨ T1bM3!zA-_pup=EZ}Xb7Wu1D:k֔z"\ІH+ >itZݳc/bSOmܿ_Y܎B.F8q\2b ~0m\[7?'_-Yo yvf)uYŁO1ÈtN? /\lrξޟz.I/WIycAwU'Mɀ 6 Ϥ$R+& #|#Q!z`qK@; 0RT*PՃDF G( D™C 4H&p&8yΩ`hw2,Y{gb!~碽=kw3dz<]QWGrDm[.j|r=jYeyʁ':"Yg:jd4& Z$׆rO%8K큧 LnסVQOb!G/G` SAq<C $,+mP.)葧dn(ՃG8.NgoE*ylK!Ҝa S]"QT'|Ղ%;ōw2?3F_}jb!]B=n803,ėJx6f)j67*M)+R,ku%(4\}.b?:)Qbi`176Hb-%FR\@sHҩ(IOܧ{܇KBڟO%{TFŪوu!=W W:Ǻ(!r a#MqjUj_n-Ƅ, q:)DZŁ Zuoˡ2_2leGԫf3;@ˀ׻u3o4y",^qzȿ(F梯mm.?cp^>?P{\]{ֺ8_i.[_8şzC'ܬ+z{hh#>cV5{V]!`!!Y?ʧW5zUBZeFd\C"r`\Sϡb_>lKU]xF%_INk6gA1IڽN̎mԁ:b@|zҜ_=qE˘[[Pr, Eh\P-5.ݳlǜJ 05 Sc :WtUvڕtpVrxſXu U8J9qjq|cM5#_ gl_J(pPwd%m퓍l;l:נwA /Ԡeɷ6Zt֍#1C7!@Dw^s)"VHzaRb md:Ҳf&YfkixQ:e@aBVVXe$#1( Sb^r\{~.?xOrТ0l'H8& z,l}FM3Q`(f\f7̏*M3bL ?WSMGv+&RBjĴgpHY5p^VQ]!g[@> ~71;},nih:z K-zpƉ27m.rt}PR_)q{=y^YNP1fpS BJjpπQPg4(oS$NhbY tZ{Í ų57|<Gm%ԞzAY '>j{RyjzIЅ66zq1i;#BgWY\ WYZ],Q=\Ez'&m8ɷ3.8DžL÷q|{^!K]M>I ?_irBRlg1O^R}?%lTO W_6Ncꓘ~abb@hAnp\ޜ;&hNo=53}u bYtjs.0ҪS@=OJ[iIbUȳ+א,-],%Oe"\)*<'RTgWY\~6ϼp[+C*vV7S{3j㾯^N-NTzx\Zǟ{l.bZ?p?7˗Uwb#eev O)c-;42 ̶dr0i#FAf6y )Bɶ0\9Μ8dZǭlEv4C+]%iWmTQ?aF<Ij]1f.6;+-ԑ;O"$=HfӭJp\b=Jz >G1$\-!2Pt-Kpk8+h7\<{MgUdMQm+g:#8b.·4%']Vcـ(J\r yE$[PAw#.iVfWa)N[K8Q r:-<-10 G)物3yYF$UD=@Bqerܲ%, ]sZ9jڹ9vБsz)Wk MJm0d?*/hm:k \:WSuUs r2&{Ek,L!g) t:'I UdhY82B U e&r$EiFΞ n*"k&78( UM&vXxj7vs{>YcYzƢOE#jDٲFF4"+bҢ&Ő33F:1GF*KeIi볎::㌑hmZI\%"Shp+ѥDPVU15bki#s:[CnY/N/vzqkhWYT 0<3Ư hֳ ~"ISN/C/>l;Շe}xETX/rsy?>#l_PNx"rbrr勩]]v9 w9NrUr֞m%?'+㭐 YeʕOV3Z,j:/cxi3>*E_o(%>nƿ| [SKX6)/mOR>&=k2یP1QICLxd2}zP 0>^I^8;8?6a|eN\EKY&̌qg5`NJ~3rBYqm 3=fy}ET0e֚ػ=,.%y;WZ&;E=]OM-x2iY jE}<ǂCGZa[mr%>r%|flb!(-a, $}lSrc4!Z2 eaԞE;>W|>㏙B8 l>D  I2BC r}w ''-80mC"st[mw#7NVuNDDdrOPn=)|\ Ph*yS dU QT&| jʧ+Ʈ,E60FbHB5PK'b1m7C\Ii,{KmO7YeSnK)6]^v Ou|ty g%Dhk*,ȴAhr:*Ҿ:Ks U)>Ra|$1_E}.YD:-PFM2+ڹ6O֧KH($uPr9f@Rsу$Ιk%87Zz_b`Ev-__.HbEc. &Ԯlw'|{o$i雵ux9 79C S,L_}ⒸZyN9R.lXetږ/wPҊ&b_ɗPk,T:_z鴱[ji8g}To)) g Q)I.dCFW_l G985ȣՙ,d^`6_tAԴ m`[kmX'dgշ@Ax論^-%T%qxPJ5itW}]USP,R^IyJ,̋N_r wPӛcq9-xŸq<[-:x_;T Lh2r* ltY8@`#r",a d"XJ֠Q+K9&&C4sSf"gJ-؈("#Ip١$7gg%umJ|[=3_FlH{;Etjn&ZgZrI^Dz7̝R)2%ѳ46tؖ¡XzG6O7su3iy^s)6hR<k02osNS.j5 u݅""p0rP2:,J',id䓽ui൰fΤ%{#W-ZdvW š(CۈK)ڲ{ّn=.Tӫ%^FsLH"0.䀑 y3>U2IÒAP3 *e0~Pa/x`A%abΑ}`4(%Ja"#0+bY/;s_wY*Rݡw )B;j+w 4..ZJ5'~*~\A$Q\\=y ù;/Wc ׍֑%6\Z(_D\]_/2+δ_uH#UsE9Q)qd[&n ^ޏ>7/r:O_Tʆ^G/Oo^?6_OR3%ؽ{P2Ӱt˜I p}3(rݢ#Tu?,*,P3j;{/3zLFcnX r:g3(K}Í{Nl! `?ެB㋳|C%ą) qX:Fks $Osd1d?8;y9Y`|}E7t-A&Q%<=[oG5cHȮĨ@ZM@+Eի ƁAVKv); {U'\`T+ w^R(L-j'@Og7煽Kp{v$$h]Jzy- ޻ޢG%E?Jz'Gƺ,{n6FLd,sbtT椎E]&Aq6;n#rCIqL:BE"6J'UVqc3rvɛqO8L\iy[٦%u^q%j ށҺLkQZ+0wJ̠HJ^ j^Tj,7 JrOk6 do6|!^='xkcM,cLޢ #Iq0xǵ9+4RFVK 9^,D˗ 2aImV[$M B4Elcd90L{ñA3b>Ef\lb PrQ3E,VV'͝7~M{}~]ӾvqߚvдԴpGM:_m@r"E.=WITe9JJp&uxzAH,;ki>PYĨ#svtഓJR"&5dZe:.cnd^0M/x|zov-s{QĹ%9JeIEZY,W20sQ8=쬜\j$2;7`ж`n]x|M#W˗m2D4ߋJއ&Gy B}BpEkN O FJdb7W\ U!U!JwpEԪcOB%"\d/zάMci/,y;hVkyI: T9i)e2{s}=^]3/j5TFy)sV~y`JE=y..=*aNhj/xK4Jţ}+Sw*':T dd`*s}c2Oe.wrmD{+)X,*YcSrԓ0 zqUL{\h`]g|w*Š{5mmL.G?k|5i:rb\sG7MQ$Fh~*߰s0@#8ׇ?U%Gs B%WEwKPi`ɽAKvBpUr8"j UrWoDcN `U!OOa*W@uWZ1P֧W\s2ASDfw*T! M•˫IlGfZؒ.+t}gmweC]IUe2d :&g |n,3+r k3`-_eջJ Oڷp9Q_l׶M7Z~v|N=}rq65 拈?=g6aw7-g=`y<{r. 
3kkjݝhu4#ꏣ`m-)MPդNQ~ʺ2^hC,(+'u©VG51u-/Ѳ5Znmꥠːo/`7_ KAE 埥YVS-l^~BH}-Ji2QYd|}AwOƳ̜@DYlanz ֌ "#rjR4qN@p`4n~.xK&eݞne9A: {U8s;Ԋn."ݥP+UFt |]$C' IZ%UI2*'0jd#%C'C%C4@>]rd[ϲyacDJJ͜Ήi eRa:w츍`L0 |$0 (TfN[KW;Vq;#gd+uz<}'6},QLt.>~vŒTy5I@iU[ڤW`j3/*OLg+Ty'j%}{B%ǡ,Z:7,zޟ|_yV77 JrOv6 do6|>M*xmRzg٤#s, 4uC9k+E `1iWEw%Z^8D'F::XG69ZK !1%c@陑ZHfl:6;#g.o%2r3uL/RVNB8ГFڄyõEfz-j{rꝒrt .BJٻ6$W4E>4 6v~ci (HKtR$m,2#"ˌW){K%Xk) idёDضp z/STʂQ<f[i&Ύե $O>CRNRLCA3sւ ِGU=i,v0?P~uug4X^xY@AP/ A_R*QbQn5Ӓro(FW+N6vJ6&Kb1[TA0R$ʙZd4 *N)׸~c -6SC[bfPzqӋL6F߼K>f^>w/(}u%>U)Huj-6&HBJ:)0!HB&Y47#:PcM Z3C%}mݎگ@^>ܿ_ѓ&aL{Ň+^d~iy'^?|_7ӹpMs}UZfq )51!e`p8zMm ZNCC.=: '[#-6(goR7=cJ66vokl `Hҙ,bbWUOG]RpysZ%(k1Y\qa$ICD6]h & 鬎1x]Zm֗-H֯I/Ћ ^c-*n6lO+|3JTSE ^MLldq)أCKqҹm,߉9bSm6ʒC+%i%M%RC,@ZRHmݬ.tۯDXuKW OyIȆ[ ke*0 Y&0& r*$:5LQ,hԢ 7f\SAoR+[[8bsz/l3Xv!|T5H}D1|PUYigm8Z9V8 8qUY'kr!X @`u䓳D1ick B\o̵֍ʧbWdj~i`4Q.I_I[!e'@uD km0- EGQJcI)X$U(J1tЉ|j0Jg}v|{;.}ЩxWH kJbEM*R(JkJRJ %x5Kegɦ J E3Z`5]A%KQd#\wK4ĻkFA4>4wkh3=ڑ3{Z H 08ovo"3b2t1 +lRX,y-UK==ŏݾڟ!o c#ځ*QR`2(h#)+ZǦ8Gg)7-jlhU[;_ _8sqjpyX?ܴK2 vƷ.=:::„T^9)Vڽ;U_Z3tZ7ёd D5)ԅRcq8Hٔ0m}kGރ ^}W6k$Fä1\h}H7/iFgrqEU1"bF yAU%ia`DJzmb3DΞfB$QY9:bSߞyLX_ӟ?6!Sծ0:K/HU;#ntvȟAWx98w]T1:Y7ӝM_~ߛY.MBq1Bɸs񏿿%|_~~܅bZ%>n->G^$6:JFi7U/wXm5i}{2A7So.fۿo? gm%'SZu[?m==UKRo5?W^?Evn|tFr=dowuirb>RK{ߛ<ò"NTG+]<fp:k1_^-gd_䜍d)t;YPjLiɴ")<}왂~tOx7L[Mn8M-(*(.d`]^GQ>vybvUmdöѺ%G};ẃ& mxxY4fO 0_QtDu4iM;ϭ(H߳i / /%Db1z-dQCo<j.?yt͍lmkMl锌kDuX] Fx##3%p+NlKo}\XS;SfPC lT($Y9 !YE?ajr4Rh{f<=-oV䷜3|BÈ=t 'Z 9m?.U`Ϧ |s_ ԻW)]࣋÷,Qy2j&T3I@ T'(pY}ַ/ՊKt+.bDq:TDWZ/"A # 3[o[!Zc"[#b+zD M. ;/tiͧ8۹s/tJE*6>x7bͼoOt}uvyKs#&]LHLݫ͡G&R cֻ#6x:?fsH'ҜGPsqy,Û k}#HdK):٠刐#kQ= lCwK;iW_QنNyË-w{uX_baG~B|^  ?AkmH086C@pd_6M?d,9֍,9Yj([oSif꯫h* ҙyٙ 5/8HoEE92SJP EBlOn8BA$~`X֞ /3r!]_"%ȭЦc90ٍ[D'`_yJgP:*Mw]no\ ̠} E.o{(꡻3<a'#Z &q-ĪEvMv%eϺKͺoa s$[DW 0=NBBU P+8-X.GYDNhq*]EJRF[DW!Jp۳ЪyJ^Y(ЕԈ"Ju{.{ѵgUBQPJ+FEt Jpl ]%x>]) ҕ֒*sZCW ?[W  _]=^_`Q@X{0Ah1?Sa(+ձ]O#Uu[ SۂϧᎮ"]/(N<̧x:elpqVfكĽ{Ne/TxO-.ed[Ƣc\ggY؝ʣ8L0fa<^KV"X#b\˰{+eR1EAZaL>gUgOҳ䢓;kHE>Ӓ)E 0ßU"N~F'1,z78 4r9+:~H A[.&-MBu=RΣy %W t!ڧ"a!%f0ߪ64&= O .mV4u4ia"Xb)ZCW2ϧ+@)$ ҕSf+@k:] ;zt% M`,hk*BUB;zt&M1 luj7>h*7HWQ "[CW .k͎hB+#Pv!鯇گ @sq KW•MpZΓ@ KMtU꘮XJ tpn|}=%% XAHCp"?s(tTősze46+u'AuH7eR`Cz>HBk8Y`[OWsC;4ۡ){Gk*{czO*'ޕ` <TD":Qɰr \T-bsԻ?܁&s/; a1@)~hWmXk#\8/7GUEfDVKpiО)@ K}j/c4- ꢫWѶaztPV ѕdv}rөQH-P%MTС8,ƧQsEQʁkfwR-ߘwPPOnϯ9L!2 ,%  ?͋3 h0\'{ 0vckRO?Wi|@3| ~S\ছ7l-~rW߯S_yDѯG5ı/SmXqRu-hO9PmŽ?,$ //6߳JgXWu.|40[bp#KuVwfr /w4k,ɹZzKV+p ƹy?}e>s3ȠLٔ|WI7tpVI ^Id¸tEXÄJKi7 L)jLyLPe{5neȱ veY|5rWlbDool[rm} /}?2@ߟ\83&mx)bЋh${i8')stf)&גKξh͆Ӫ4ИAR{HzWCPҩf;0wyMG^ӭ[QHϘS|TFn>ÿPy^smȊң?<;3 l2*jFY|mXIujW@[]lu^ܹAD4B)N [O#뀐vdS0ڏK*R j"RSFDD b/<)&~ n17&ָ /Ďf2+Ay4iL 0N1˨$>Pn0QFZGӘG"h@QAsglrD`@ 萩yg^kgq<gq^Z; |e&X LRlUEj{ n <ؾE2<ߏ_c?טڏlଷ+{Y}o`TMnKl`b5fB*FiQ;6'*֩3%dJ2hQs,dk\LC5=9lXn.e)Wr^@b3+˜R;>piT:3ɬ`)$Ťs$S6+p8JnjO#oPF)K #VRku8Gvf8wk]C{Y*XFH|[T~=s_~o;gLΜrSmge:롬|&࿌y2MץT*)2EueM(#V S)N)A1p uܧzOn8BA$2!D!e!x0 @ D佖1` i2 Ξe7/f/rz;ȍ$}|nv%173ym` MLg:K``d,, I{{ m:v,nL2NR?.&o/3(?ΊCs;.l5sP2YּO$/ ^.!Rr>w{.뼏ΏGqү RW1:Eʼ9m.ܛ^yIuƿƇ.%b vC*[盟7Osvbe4|(vvf1Cq$Q 3 }&HC \QƹA.j Fb]p0Z>DEH5#p\5[Ēйu(I!n#S{lh0ȁ5(h )d4:9*I(Tc931hdIDCz- IM Q3' qfa2NbXXHw]낳{O| 0y1u]J1Z`*y4{}Goma}f/@&wXyWn8 rG5g$G[pNH2P2z-O0(#'8>jK ^Ȍ ^ee0NHnf nfV kӌc̅“rn6Y`7\~2"h}233&T(M2IH-) Cɱ:S/cSE#fHΞQkDJIO %(Mґ0хT53vmpv3|l[v4Iڝ5S&HU$*<FhTVðF -݊mW-~M)=L.WkmH Oaw 0 \.~F?%iRCr}g/(j(G险~TQ[xi )8No4R3#A y}5FжaHvXSsv``Kaq ^9\D]:) H2@\EOz)sqBF= i㹳jǘ2D*B42!ı-eD7/pK l+8V/OHowt7]2-ko3tp?yAtNrLSk%:-iTH ,OxbZ &9ζ=O8[OLOR%ϕL[b#E2d"s X.(\q+B`1!&ewzQ;^"N,&`C Hg8AndOn[ϵ5U6%H)$x]Q}PNP* 0BkR:& cWdOC1hM * @)1 DlL]{;smi>]ϫF6kT˽0/oIyg\ow&qIrO FkgSGjhW9?s-zJj$3t*C_?owd2{SPxos٠ w4?Mf0< 
K3ER>Y|v?p!BK?bƒ?e?ΦWVݦi ѻcdX_x$%o奃`hpGϧ,.IJL,t5Z-:w.\\~^Pp@Y$0<}j!^P>QYS˅ن^|_u^+el׽4`r:2 qpqQCEda)Cr!|K&X<[dZʸuBtE񸁗(z.Ἧ?D;EI߻G!&;L{s^},oپSVIâ]'96>E/5R8΃RJ4}r8ųa[ms S>Az<[.ߩ=X;uG/Vccraؘ $0,D1 QJ M2I؞V(6? yZ1 C!4*+,SFIR"֐l0;aji֤~ijMzZDۡȧ3>~^w:74>=~4CIaQHWȂj n+u (H8nЭeUc=w]l5v%#ޑ$Q6$ǿ#c 2"S!4$%Ԍpt !O.*> i Y{ź{ rgG KuNB y\2| P"GXJN8S$*r=T }m{bBCs_Xin˓yt}l\ gD8"s&S R0cbQSN*?. 8M]?by[9.w7mS_? G[N 3ܞ{\Fƌ0;GX{x%؇/տK2b~36HqWpaT0ö.fz'zHza⯆ V9GSD' lWdRZZ|?I!*9!#[oB@OBsYza34j #4%IYV$  SFQ3B LԃDDoT0 W1cLPQ3N\ ΁B$c&\#g.(Rf/n+l5۷Sw;:5ҟM19Ot2EN389k cGBAc 7ۜĩP"u3OB'zN|u)J9eQ:FN q,R*g)+l.IIF$O򺿼s?ǛKzO#ylRTg4]'RBFPG}\WRIftq}+sKf][xv_p`!aaG^jBg(&#!NvR uE 2^HA MT5Zq'*WD^u?$Ecgf ur Aw2uwJS Y.Aw+*%%o&S7aд# ЮLc|!4m##2z %19ߌ~zx-zl4-P_aHAsV+Nɷ[.2eRm@?}Qݟz`PY^J}1C'(Cw{L/J4ƙGG\(>&tGO9yn_P"@|ؚ\\n6ejQJ=J&KeBtOtxooqTv/N~)ȿ%&uy8t4LRݦ G/^ۂ<*vPT+~;6NxztZl֫G&9Ѷ P(˰M؏y7gR-=)_U聽^|ţN>фjGnsOը;FcLR[Us1Xqe9O6&P!'YZ1OZĿ>Ϡ8IQYYfd&(ݤq9bM"XTARAɗw%1 F^^>]J80Yj&5?%NSQr]}tѵ}U`i]D[fk+c9Wo_Us~+?ɯC w8>` yl8>\q|OVz\jAOe"_I(K>+5ro }uQC2`YxoMף6O_ /s=[C [su|*ԫ^~QLB1|w{U |f:'˥$KKŬ*˹ = kݛ=\l zqB\n%. ҟ;5Vn6AtY:1{v=)' ؋-::YfN>zm8U疽S/|zQ`OT :ɉ™ i1nH@h v - hGGE ((e Nb6B]IBێgHQ}g wH;43EcgJ#%&՚=&W Abr_`LJ?kSl89q\U %.Єt)^@3WH0elU&W Զ]!@'^2S8M`%F\er ;q2PmWJ;qՈ+s5(8zNmzE-4qT)q:tAi+$XW\EE\ej}0tqTjJ:qUpzF %F\!sWZ.2Bwe+̦AYޛap/ qyIΆ5TQѳUa|jS: eNՙ:Z%w-mI .F2RۜMؾb~Z\S$CR gP"%>F"Ǣ8LOwuWUd4# t(YSZf}<[9Zy{^r;rdMGr2;v;E}l4E=pB1H¯*:"*)SZbN'̀ 77s "^d0:}/2H+1L{r%\Јt`0ayΠF?ɾy~~yQvn1Q=MC3 Ua7lʚ L`zik#Nj ~xYdvCfj);u3`T{K3<[Gzų]_kNkŮ[+AW7)Իm :[gJWqw5{DgxLjx$ޕgͻul|Eyx@ŰNَa͸Kp%D[aWiMx2_b(%TLaehzHGsL_ -d*[哳C~25Ea.] )ԍl!H;nyL>doyQ'ZZI%61HpQ&ArF0ΆpIj;i!B \ܳ*x#(G,O0KFi7^>gj }Z +,dF H}?mK5{ l3BYd!O=ЛlkTT^V:ID*[o]YCI̩@jIب 3֩!+ɖa Kf̡av .Fx8V(a͔u{v)~c_UCLmPmI|jTc]~;xmVnE&UKY8xJTRu'WY[W` դ5k^Zh]=ERզ% 2J JzߍdX •&B6eU"UV3T*|tk"v7#w Q/ڈJl7e=")i\%rl \%j>tJT*qW3v˺*_hֆg'YiAX'Yї[)cwU/Vȓ mti?~|%P\ ﶤ|{1bY b=OIjS2_YpBk&IgAuѠ{rɴB0QӤiC3qޟ{PIw2;œ/HG&I2a|+z˪&78830S Ec1-wF6PA% XUWKJP#ٌC kv 1_EBmK}JW )V2""&ZH Xy$RDv$H|) uָt/՗;q`oLYXv-@R fz- ƹو`_‰hm3TOKВAqcsl"`ipPT0ɠs[m ^8LUb,YyMFO0E4CRA(Ŝ(aHJ9+qeuԪ 9iGͦ_l ݭvx-ئ[ܜ  u8Ÿj,UgDwx7Ӻs[e.ہЦ¶_OW,o}+l#; RTSǬloj b ]Iw" d&=wR-*:!b2u1Ýt_)0u'Uoޕ~8Œw^dfmeY۹M[cR;ĬW00w7g)Oc.*5b+ U&]tW^̌Ye)Lemܪb7"pzmz>tʥOat_5 rs͙~K޼>쯙س7l[ɗ!;/|Òd@cμ۰͛ŵ͚syV2j9d-lңI'rI.'u{\yD;-'j9=D<<Ņc”]ҴX޺<|seQjSׇI1smTZD*L; gK<*~tљۓ3'1g-0lz(D(k([JL.`ґ[u4lmxx`!ϱckK*+In8y=&z`50[̮y:qb^m1%IdSTa *q0/aqYVqϔ6Stw6C3M4HQy' ;8 V삗ľ=7od}@K~A$5q;vF>Rgƃ^uהrY#W2:?t͎1]؁o<y(npH[G‰uXN\Ƶ{\ށkBN+R3Ey2>M0&w,յ7}ήL1yfQTB]wF|UCϮ (i+ 3dxnWi9zo:AbsaGzg^^66-LJIC(ND#抧i iu@H;ӆt uuon-Ԕ6G)]t际l,B3ĝ$iL PcɅq 6b0WJRlc=sT{l7Nc`@QAsglrD`@ (Ut4s/= "edbEyOQ+)H=˙%]mϼLՏd9AHase%iP#ˢ{#!DٖV<+lNZDm+8::BOǤ&C\pnZL9FFwG"\J&y$ Vi,jRʃp-a5a1rt*4= {q+;L%J <|y]eLN_;ZDWT^_uV{g+زuNU:ZtQa`䟱bJ(bZ $H!+h=fޣs0C5YI;7-7yHq)9IEd303,*͸K'bc  I$gqc!P B?sWzGe]PֵğiHF;K"( >딧Z%N#-ÞERQkjZ+#L$ c%;ǐ >q0 "4D)B"I8*,԰":N04E#^肏ޒ4 2p) -c8`7q҄ΪWˎ^<Nϳ?ƃ:-9sT<D8b>~W`hX}J 0Rş9Og_aRh@7ٻ22w+;ϺU=_ܙwZ;ɾE}"EK\]9)ԩV 1ļgC!q ߦcTQ}]sI}+%HVlSȸzLW:ϦG>'t0ER˚eT(~NaJ+2g$%5KYE&/ݲѻ0#PnȾ,[ig_ɂ0nm,b#ZZxf-Ų}r4gHYZ;uk-R (#tI/^ݞ 9<ս j,[4ߜIHQ]9=\<$. =fm;@<~͆e#w[qv}op_OQ@쯦ɚ>4l6ܔY{r#X]7G/KcnI9d,Kf47=<I/\eq#6V3]bu"yMz:Apˆ1K^+YCЈ!%Y3*ςʣb{魏ي795 Bax kb29<,/SI3K՜gw`870U9aZ÷7.Sw=;l=!T\1e8sIݭCoGhD:ND)y{=eMO"]W.g N6YW~ԩLnbr\MV}F5\4N8@턐θg==x~,RdxRt>UpJFyN7Zـg ># ƫ8ωG$_*|xDib`YӴ%9i$#DJh 5 hURH: X>Jde$e:CDM/ۿ1 #=b̵CT3g7)BCK$T̋QNvqA%`?sx%w_GJQ E'-{;BZ|jb6c_0¾-lqҮ`Zv! 
2ָr# dh# 6QA C r -1+A$ڀg*,Dat|f6)̣h"Z ;NR %bPxnYӰ^|H>FEJXq4_GTL8HSGI u4i#*q4)hrJJ),bYȨR ZP^%t`dUzi=VY ;ːd(rYJd.3w*hL/ EDžqvބ,a.~>i|wߣ›][G7؝mw쎪Ux˶Nj&ȭ-3?ͅ\9Zo.>/tC/USs;*)yޚmTM7Zv[>ʻ=j(}<0Z|,}莊(P7DSo־_+'EK%С zFmJ O+:#|[/${/QYtRM b4z; NIv;ɞ Rf"F9Ȅ%'*F]ˠ4r FY%{mCR"u~v.9lIbLF쓌6+6{#qyKy)DE` tf@vU~WV½EMLIkR0X;9|-svOT &C T`SjO4$#\"$. pN爙1(X˜݈泴!1`kQ{d[ݲQz4`E%%ȃiRN'De(fA2!UH!3@*:J45 Y#ṅUbD5_;U5sva+xyyP8eeD#"mē0͒MAt >g]I-Sbs.1,gܓH6\&CF )D Qs#HJZp%MzeDfnD b4jP\TqF\qqۈǠfsdB`BŤM~2_@4yI䓀cN#.Fǡxh*Go]9ns_ԫXޏxuy}QQ:#dDh*ٸM1qѠqt{<ۣ9e8{5Zl?A XL/?Mf-Jo. u̞zXIuzZ A&A€ʀYrLI:s [(&=I^Tۦ~2.ӆdOWFoK@K=Wv[ߣHurq Ad$');H9q) e)m1G7>>Sܬ?(mv0$a %L;o5]'=7ϕ4*Q؆v@ .ˆCW"~Is{NR dtIU}/v^r͑YgWĐHwVhA 㨣%PĂbE!3WSu/?"3Ҕ4 '8 Zxd%*I{oKu1͝cKA+s`m)mf5{|V%To&;9C>DV C`! vĦs6(t[B?`;STK=JxCdߋ>;\ow~W.)'yi&K*W7/[W*}]}Ϳzx{|(tzYFތ h7;"Q0Ԏ}4Ev1euaiKMƪ#(לRj5Ad]* ڑu+p`yUH*I9Q2Z[W)&2 iNd,8ƵY#sVpĽU)Tt3K1Cـ1y(kMj ?RWzN3թzlIG1 B6RjO6]R9rCsq~[ϟ7x}M%Vr gb -D#e Bw5,P7C9O.JCJVqz~LCOТr]n;|~Bd*goѢoEGґt,06<)^|(bj 7,e,߿< / )Y.4AFg\k4ԓVL3{!4=37`fV81R(\47dS"{(+C5sR!}?=k1iR]VW*;GrE%m[jv<l1龩4tC[_LJG_iG?2]ŧB [ (WZ4Ksz&u?I!oiѹOG6ŢNrn];-vyt=..ϯ;4x\WwlRnGhPg?^>eh9bp=Jw9أ'A8K y+f޵,q$_jM>"Ad{(˧p pFگ_n)AE@h2#=<3KÄJiT&$),2\vF<ݦTC8fa^%)9li+ Lgx`c]s۹tD ] e2JWϐ"{cp܎\k6CW6=] ;PU`Ӗ fJ\=] J甮!]eW]NKWLi; ಱ[+AJP޹T7sސWO}zvv7%~R ޻!މ_{]'I۝Won?ɘ[x+u9]^]iv >@notdvV^vz»Ox])ï7^:ygI/)ݝބOixw>^G˷}rx/L}0>UqbX:aYX8 EBza片̔L*K|&t#ߏoyGiVw!^vWmPa!-' ^O?M/G(_G{hoz$~x=ErG \*b134kn|*bؓ Y%Lף?MÛkB[tcֽG4顴Ǿ ݡCmy<$[~:BZ51PL1:f0 =iZ/kՅEs@n/_X|%5<ٲPR<"e0f5}c1csvq-d+YjHyZ _ DE0S&Q?4$ȶG#-@I1&(Fu ̵pHRnax(Bt\`eU5ͨ륕1#ϙTҊm3}Nfm)g@oԱٜÖ-Onadk;ŖᩤTƑ֑~X; &D:fb5PX )%$ڙ)B_"$7W͋ͷK cHLrsU vcR<ȄH #k)!;D{%j'v Δ|Y]( _B&0`)VgbgwPQy@m>1h-ė&]cYq$vAButʓkB ]o@2x!޶I 9 q/YWeQ\ZjL0=Y{mFe?R`T# ߋ` G$uc6 >k5r9Pv Det((*qtNNU&guˮdI>St>S ֥̐MbXtLGɓ]@N`9U@J$jC 52S@H`)b(^RX"ɚl>A& q'2Cp_XTuqU% 95[r##V r<&{kl4BNG/u>}r=uw} Ÿ]Bl6fiX} ][{ #C+|uicyj"!-]%@_141TnV?yD(ʠv?]5Kpm9 oюa.EqUIb(ɀ!ZP M@ED]G 7åUeXJNS NJp"*iBPˉPk8OX+;,s)&j$,Y+"d6RꝦe譛!WdT XHL-0xxT_|V 儲FgIm%eC緸/.Owz| Ia= B#xBdK`3^o#G w`Tcۿ]L4YS֒u"e0ڽ6g#<AvU  `QDGDke0oTP=`G(mEAJ@r-Wdi9Mg0˨zR %4+tOχ +I$XÖ:y8H 9N&o Q1bpR ,j12@zu#ь ^ :H5-0g.T:aWМ>p>Vn5HFjփ*Ú>+|KV.|~D[AL6Pxup>'е% "ϯf.e'[|ƝA=-pWQ kF /51z7%àef@8\??Д n F5>dN֞;JOQ!, !Ir[1xxpQi 6rJVKm\U'KSX`jCv&f=$^C|Y.X~`4&6kAy"um DB4RѫH@R': N uH@R': N uH@R': N uH@R': N uH@R': N uH@R': 9]Z/{oOz`r@UNMOzJt\N z h'Buk9葮zFN/0_?uٿYL8;,tuXk) v;Aຸ h/Aus k节6CWf h#+AI+銃+.m.ЕJPt *d-ѕ ] ܼEۣ/JWϐb[: 9ImweQ*hK{WM ] \gBWJP]'N=L&>1]= }~퇡ODWCIGw+Vzs+N7CW-m-c+A3+9fJ;] 씮#]E!|~>~Z~9:m~2vwxLٸyۛ/K\f#λ_oz0gQƎP9IvW_g&/~h$yhbVۑ7}+<\w㭜󼡊;Fn, cS6OEvbM&gl-H;9$˲[Ur* ܶT*U$uH`HD4:sChHFќND $ÝsZ#k?Vis-2U=CXJ^ IC4t8)bZqgW(t5"j;t\ʍpwJ м,x5BZpm+R؈ĕֲAS~5ո+Rq*)ʂPU+"$MZ;ORF\"h[9]Q,K7Ղ+Rk J`0޳u;][ȸ&WԺ#᪓J.+=Тi* U++T-" WĕRK^(qW$WZpEjAK'$qepW٠E9餽NA@l.<|:I./\9 ]. 
lXE x-ԚG9m PPU Hn="fcȤW'+4"\`qW$ZpEjC[A|ջJ+@"\`cm5BZpEjzTj7qex'H;Q P{$+RTc0xZi]SB 6wEre5 E23t\JaF\ vH료W Cfշ+gћsf$cM9 v2peF\ZhLEBjpErjUj"F:A\ Ab̞ۘt\;cL |#S?K W1lEt*e=r=t}䬢[&AW3CjzDC*8s .ـ3PҞ;-vậpaU Iյ`+%Mk1bt0--F"\`~%yɵՌZqE*A:A\)딫]gRV+T-"f"v)J;!T+lV͘z"r)8&\b8E\YgdE"U++Wv9W EolKvS$W#?4?J50we;ʎ:9{  ~ OɕP HCz J0T+|nr5WV+R j)Hg{N:YƩEޟe ׻kf?ISUl'v?Oizj1{)ĥdZ;&sz-rUi5ӶLܣ/I8֪qTq1\T+lq(E]%f J lI<"*V3/Rq*OWFhSPh̎j2H[4l WPc$@\ǮHQtrj(W$WVH[/z\J>ɻ#^+gѻsh: vpp#M=l.* cuquhsW(ͧ+\Z7x\Jĕ3QHjprJ=t\JF\"٪hvFYn)4l.2NhwnUVTZګŸP5u*YF.ոXRΠ⌫K!/>W˭;gtnۮHsSO$Gr%CˡGrrc1;HN*]MBBjpErm5P-6t\ʍW+`Ng|=VJz7C[FbջJ+\MYZԺ Ur9+ePHjPc0t\J1+l[+x\^OJ:vBIS+"\`-ACrW ~j nշ+`=sfwGMPGNd(7'V?ǎ:蹡+ FT+T-B qE*qut۠++\Y܃ޠSt*A\j?IGvOݘ7vi ksf5 kxM Yx߯~ d67˫m +@>U?V//X +ש~-5o$ Ř&قP)yc?L?g~8ӿaM 3JS]7ˋ   tkk k-s&̘!eԠpoZ_hJfCynRƏ͝$%PVڂqgfޕy(6 [熞|D U܂{r 곾vvvpye]ʓqz7?_Ox!Q8yr :1"@F -f6A2C^Tx4e;.^{}{Ծ?_cLˣ8zr=Z{u4lІܤ!L U-8?!vt~vx[g~Y6?m24, ?Gb9Գ-fJ6+pcٴ.;|F8MF+Ԇ9[3m^]hd@j%T{8`kڵTbs7e CA(ei&d뛠Ve\rs( >~+ >ei ̢RL*sЁ3ᳵ!q< E(:fr^{ *A#vDC[ ЁXGW]dCaUvы CCh 9HPޘѷ;zT([_mf#`rQeQ-;Fs4㚣* ϳq(T|2>%ţ,٬Ln@gcFd#n{阒4--p%c@(# \ɜ"%Gֳ9M8"S4nqmL&mX!}4sdEF4&l;PE~K'YIPc56ZV%iSƻroHjjڠb{<TqFIΨq FrƧe7 ^jy]0Qz`"FjL1YكqD'(34v@i &@!bH5eV- !'Í(d VEXf.$" q\ƛə¡$zT&sۛ='MYVD׶%g))IgϟveWI`oL?\8mpo2#DNQh@! Fsb7I%6X(o{> !E"lF$F9`!]N LL0s01jڣ}v5 >mXƒ^;laf)iHPB2(fA~(| Vq2:C.1 &eF/J,bP{-6ϛ 0ՏC=F"D|6NYI20:/i EŜ@f1^,g YIh!I )D+Κ6F4 Q({&borHX8oVkHcՒCz8rA{'3ΉyaBŤ&/d4<`ᓐĒF.jǡ<4=ouVsͿCG8Xck(7&k'Y[rzVُo9bL{|Gs[-XQe7' jqo ܗ㭢fk3 ɱ-$G.sU_Nyu(]r89H)g&drBkHʼn6q$%He)F$:}̺'yЎ!̑BpCzOm6) `dB5`RXtem=3o-g5Ctݱf_evLalTr&0ͅZJ:I492QgbA= e 9s!XiIE6Sz^c3i:IUX&\c@/$Ib(+``g{ 4v3;3=WHѤLJ~FoERtQ*eJfEEdD3?TexȦu=5>>_5,f;:t]r[Q3iB0*&W )L)0 <t`DXȽ ˉ3NTqx7-AE2eJjQY 5IBpJYl9H]8*?O'{,?6EY~񰾝ÞY7cw1@*-鶙Df0.}qJ3΃ICenZutHi|c$IT#=*]jqDkDS!p3Rˌtoz9J a S$(s*۬Ui(yxg!>;bWqޯ>}+yߒ\ o >{5YN?}+[5 vyT_i5Q|2Gϓ2;(>eҬo& ,B%mo?V;t|KK|7VM~.aq:;56ůCc+-qr\ ,I (fs!ْJ`\R ,h56}\f[ a* 1 "% %6)낗dXU s S+7L|{wә\ m##Boۈ9=LlѽAw-w"z'"팲/Yzus[ $#}ق+QګL%d< UAT&\ =SO]K>Y).#Y t֧dQ{\dBDup&dPKL rJk.k(I5I 齥>C *4fI/fЈ"K [&j>ݗB]j|d!x5ys?tG) o4C>}e}FӳzAQjXo!u8XVR[S)Ծr>bJPǬv~ߑjC.X'] o@*] ֗!{T^#$d*c\[A$ JTRtqsxKx}r^pֳXࣇ1A1^PIH"ۗSZZ|IHR2ۗKBO>qIH m 0N%̩2_tHYkyký6kՆEp<LRaJZ0BK)fPȨQ$ *$NY~lkЃ[E OϳcvOSKd"r'iqεLu'3]^aȠJ|iE+j]fwwO9tfOU=uԲwkzjr@k-wxݮ1tȆT߈\ PRjSo7қ%q1%jO5n>_k(F[ԍ/ UD0k';Lif]*f&~aspD_@jA0 k@\Ja+e Z$Ͼ:B:~fr`>d82d,5X f>vQU ?[xq| ??]#epIL,(tQQmEbdAV0{!F{pP` 9dtV(ӆõ1H[= c]fvgV 甔O9@8RhbN)JB-!S*,- 9 }HsDwΚMyeI?M rRw{eoOo+/1ѧ>UW.xr0 wP%+Z%AےY2rm'nO;gUKzZJ4M]D=xW X|G5M]zc8l:N>w5]o]:>M !?72M?]wrs_E"`Ru7:y9aSfcXN<\}KzIG,BC&XbQtxdζs.v< lOȥYH( x(I9ɸX)3O1&UL)b--̧ R3" YCl#q`I2sOJ3ڤF BWv`ċS@ZGYO{F1I".켬z1 hGD?OD ﮅ= ,wo>^ Y,ѕ"E:IDIj{x*Zv<Zt.mP2ٍ$ٓ  ҥd3 R#ff\>*aW&Y@:9%A{<8!;<W:xӂd$ q[#Z.wIees:}*^LY^]&'uкhf|bzLxոcAxޥw5j1{am9h+]*ѧ̀myRtI9;tK ׬Y9 w*/FsH*&x6)XЌ($rmh7XWEf-u6'k.(c SLJsPFD0'[Ɲw.*ߌWk7c1-S|/|*HS]NnrB.]s+vd,*".G1g ԢKlqld R#]fhSZ~Z qQhT*ҁ$'$eiMHRzؤPZ&p_X:c^[ȹAܠG,@Eщ<ĭNenQy4_~ŞǺc\r cоT#a4!5G s2{odS=u&9G=v?vV10g2¾ڀDd, RjjzmB%6XP8Os{1I'gMZȈK++'*]DUyfKgx~ٱ')xZb4TnB$I5rcrYN,IUD'0#O:`  RLE #\C 2KedpfÿV_ɲS ^9!nzSꊇңKDnHoS.rIJ_?SlWe}p:C\fܓU~D*TrINVW=6f[i%e^ea^4|G[jWt"\7ik)H߽7-lm6?G}[qwnl/`zXhp8i9-l[rbi=9q2E ;e->fF]uM4f*sUAWjUncDzPÍW;Cz XgҒkk 0CJij .V((Ǹ9SZw+Y͵Ϥ'(\#zחٿogTy+n>mz㎵2'&}Bԕ*-³ϞN'"D$];AxZJw&$Jc(&m@tWթS}Qr]_df9I9䐋zX/>y>#?(vڄ U7@˅COV*Xkתfv%xz0a[ =^VOSnjdxV KpYX `TZ7 Kqa)3DW 8lJ&7-L:])J C)xkfDW 8ЕM4`JQZ97CWH+cȘs Z]mfhF(L tTӓq̌ s+ͅȰ1+ݺ vVte 0Ri6 hCJS+L&j* wurvwfi_WO+nS%߿kQ׏G82]r4P"?( RrЪu*\/'feEGs'MмXω߸b9_~7s9R GFw?}tqvu}iCo8w m㎅%J:Rv3hF4|n$ M+ZNӊR44-ѕ'W!ͅmPa|/S$3+-Rυ6Y:])JttnNpϘ:BWM~NQhiFtq'plz}fp얮6B˴bp3nbtetTӓiFtlJf5[+Z9udҁKiFtvc7\JF7uJ7 z " ~(oߟVnwonQv|â,:yF`]fᗫrˋTkX>zH5t 
.J@@4$%G=W'|wto7k$oVO\%j;Gc"NZJsӳnX?Yyyg?]7uvv\ǸG\Q7\c>rrZm_Ϟkudݢ,xI9=ț[޹cnt͞'T@D #ץ- ]*A I*`HV_{KVEyAdenϧV!΅VOW>ҕpL>Έ1i6tpl*ZS+E݁ Е•6JQ;@WCW͉pؕdAEIGҬƮЕQW6ĩӕ}Ȝ *8 wGm?uRu ѕ{+9]m8Ю nxj}Τ 6+wxI3+φ),RLS+E)@W{HWaFth6tpυS+Eyoq끮MgtF c'~zfnGG.Z6ƹQH7sq.1E= 'i@;092JN8JN&z%ij܋TrnW\R1͆WlJ(Pʁ$x?r7g6tp=ͅmhttC Xh3iJᒙ ])Z;zҙ]!]>i۝ \7R_{ JQ]#]ٺѕ \JOIWS;@W/BW)9ѕNQW3ԮhǮ;շCWH+^x3fӮ nWL-]mf(vo@W|n1#'?RNe<>ҕ5.L`fS ]nś:])ʐttGdZqHz5T[ &T|U\x{SO >i ʡ/UY_5?>鬻%KC/[>==*~׈z~h[iy}?]eua.yς.Y6,5y'W}ĝ?7^G7a> G|.XW4[]|Go)+>nQWѼz a|&q_˒V|[N@E q"C}~Bi:i^7>o>?;o3S>`[O0-z#A=_>]A?pVYoo\Rϖ5;g%N>-KbLTeWʎ?vuor}=Druۛ|@3`ɻۑhQps0%eq<_$&P#3ד[ ɖ\bw]H:gr3mV\J9;.l#s}a;::4R|x8_[Їip`s(,^ {hsft]V46&{j%RJ c$bdь 7Qrr9IX-6o "| $@IInH 1 rp#|! f 9q1ZcF-9Z3 <gv^^ۚ&S ph5J{І.Q7706[*eT`L|vQ%.1b59xK505u <D^7I&Im_dC̕l)Sd*Y|,S{|B吻iTU ޶\sAs#L 6åh dk πP#us&D2л9;rwcORs̈qu$~"]AF5yڄRlHMƗ VCJ 9З I tߺD@KZ'䶚E6-H} 9Y=<\hA%EtAlgаH!s'C L|YW(P![6 `mP[ GPQCQ 4Bs8?ҀWQO&2.l "(g. T|U|M7 )`ǖڈa\@\J֭*.t-#4JSnL. /kAG-V"@Q"Ł.Hqk+jϲc%d@BYGS ():YVW `ZhVUDI)kY!JE6ΐ_.}p@uD8J(g 6C :|@$Fֺ=al! #N ԙIA%^,KL AiŘAQSC8'$_ 08` z_,tXsOt,z[ TpUD="?uYw Fm3bk!`|tx݃Kytl:*K\$];6*d`UVPDdzP|V. Ao%CJ$JEVDݩX.6.xOG3}fHHٹuv<o 1t%V,TGַ>_E܉lxnQMZuY '1O_&d8;=7}wyv%R)\5->b="exEKm zZ@mDeBMuj WH%38Ž 5i(ʠva-m5KH)䌊!Վ n<Qy@'^7HXVvׁem1V5d$Ż0 j-Q( td+.nt݅`K]vǓ fZf2@i5eHTyPZG(o<*"bj,},**b,4L6( ܁: aBP-ˉ*PkV%;h,V{UdP5j@eV oTʭ Got^I"kBZ&Fj! q$ ݡ> wsёf nfDS@Ke=;8=?tqZ›ZAS9.%*iB _ Cavae%#h}/=BkOK)a8,1>D`8k)dS:(m(eLPZR [|W  W(B1jE, Jӥ#D``.FǒYY $~- @"HJY\M]bb($klp^' 0]puE@x3g:j̪z 8lX,r{aRJ >#wy񼴁*1m.oUi>m ܛYdOfKmV Azv(S.u&t9k'1͚Is~mh|1Ξr:mr||̅=b=8ǘq[ajN8[bNtmZLf~ܿnll&{[L'Pƶ&k0~j $>STPCBXBHݐ@z6Z6%|@]#%)᎔@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJUAՐ@ ] G p`>ah%3{@0%I DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@:\%R!),Wl@F h{%4@a}jL͞-Y, k3f`j:nqd|A:¤'ݰ} oF,&akߚLॳ^ ߲6`.ߊ糀 <ʖo&9V.m*w=Mj/i |ܓE'C ~ 8&U`$M#iIHF44i$M#iIHF44i$M#iIHF44i$M#iIHF44i$M#iIHF44i$M#iIHF44 Co7$i6f84k`i{^(5}?R%@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJQ}íXv{}o_o;ݾ,ԫYs{U 7Pݗp :7p QrA¥mWy[ڼvЇXr,`qo4iG0*t &`<Șk9f2/e钂aMgtg88wbՑ0wOߺ܊hkCW F_hwFVM M.]`` *BWV}+@!ΫI|X|X4tz!ҕx? V ZC;](=ҕF!]!`7B^ bv Q#:Z); Bn8sWۡЕuWDDWlz}ĄὫfpzǍ &n+}DWmz'p j9B;]!JOtut%|Ht ]!\cBW։}+D !ҕEWdemgb}k 30u5GPQő]VP!h'Dx=$,7i orujы0SaF\Ӵ)ͥ3R*(y}ފC0\Bxʛ|Zݠ\?~\.> _&.~Nby> ȍQ|3᷵e1֢u7;v.|NF(bUMXxtQ wiYzZLme]%-扫*Wԥ+K%JܝZYGuUmd,Uȕd1V<8' <}GJPD5h4Ch J(9FJWϹ͸\N)\Jwr+ЋM`3Op5{?O('>@G!yX 7BJ NW͗p ɻZU %:[%DWxW7BT)ʹ]9<8K f JsW +w+l ]!opX={A?]k69bYޒ~3}:@P܀ m z,g@-^쬞ߧj f1 ,_&p⎐e Pc[!CbiP}67J4o7blmގp,5o+0ӏv`ǟ{ :Ꮧ[B2ӟ@hwv:jgdS}.8K>*x'F΀JR$vn,A~@ mͩZΎZk'SmΣ-?_8Vv$+-& =Rɴր\ݞOhͼpfLPR,?~쨣ю|A^o.jَgfᷴV? _ ;k!j?Y27CogɐM|53b@t=]!\BWAJKAҕUц!kUIqwd2 '*?835VAd*{X@n@~vcr(~{?%(97(ÿT ;CWv PzZA;HFigDWxŃknW} !k]؃fp`+DԂj\@6~H?MfiټX,,f xWӓbq u\_|'WԾb«XO 1:k+D[ Mv9R.w:AOB;(Ww!?j]v'M}$.UNQUIJȠ#/ҩXD !WVB~?icw6حu=VB;fͺG@ԣ/A<G}׺Kg0σ ߮DpY6mQw5t`= è8r5ъAҒq/:z̦V]+Gw(\CL㋿bqp/WV(ʵ`>W̔+m= h8]λ]tQ M>[M6.%4c/eJ{Swֱ5_gجۑmٕ=At3q Dmu ]s}Tfv6,壵w`~Qlb/bLI^FwƘNʦѺ& YrYu99zf۪8Ǎ;X˔:b9t i!:)]sB[ˋ,Je,,\J)u21 -*zE0>04 l~~kVl"n\l o܀A/-;.6Ŵ;GW" |W>tnᅧn=r֛fcAgusA3iҨ}*+L`' *H,TBS!|"7CB^޳u FԋJ>nUP]leE/T0[ؠG`b p7폭AᶓG7z:z^gLTE&WM碬cܗ%}sðQ;21 Jt$VU0HijkxW Tj! 
N`KqK;R뫣rVldXPq^I]BU,RF2MZE9eUI6R"U[, BfsH]&NUoq;}/u!s ls9bzڋ&atma;em/Y.6vqRh(\+՛tykLmn6[E(]2޾-v|)Oh׸SUMһ$'<j1lӥYQNSYBW@d _{H 22i&{LbxcguiҲEyV?hGFkg,ɬl\'b_5b[ɢPT0^H8a̢˦rY}YKBH`|T2 ҳRW}멏ɬ{?6^:͓܋#nswS]~?:orɫ( іW"FAlT֗~+0U1ī, p{ZaejV;o;l~~o:qo6qaփ]{ΣZ~i`~\ͧi]G_y*.L]ϜGu2%nm\kytg$rC 'ݪ8{abVLm7Yo tT-F[ID9?ھ:+ۭlgO'!Ӧ/`G J~?Vz՜pČ r'y_?kK4^'dSMUAb,}M%O'g.gj{ɍ_!weE!ApEreYQF#y%rO%ٲdeofSlçUEnF?^?RꥬPY+L@CPduPћF8'4@4D %?_dfm-w7[aO3`dsԾ+ZC~AD~M##Y6dQ7 .s2H_(i t)oUrD| !J2Mİϊ);KEAc/LfQUg79B'D8Zt>of43mbCl\if4n_ƣ;H+lx]\~8k}4th4@gg˃N/Cm5 gfe#5qNs{liE>OC}3Bק>i<;k_HqJ r#5:4>D- Sąkvq~) Y9荓J'統@YsA;I,1zBf<2, $nn )@vIaen6[>*6~Xo~~]IӺeGO1br'JעJ/I:eO/O#퐴3q/o7VF࿖Q&<>~hm"D {KH1 rYJf.34x Ƃ0 t5qX̠ z1M[|ϭ*ylsa8oh)s&D 39h=e.fӳ4VRf`G?++~iۋӐITֲXae.1ŹnrYARl~.~L &3|;Xˢ\z`ٌͩr*r2]vh6?<-K,E 拪`Ƶ<:]1\I (Fْ]ଙ\5#޾&HT'LöE!ď/7~v+ԍd<ؔϧ_ۘƎhY_2wadBͽ޼GWasHߢ;殯`ϮtsHrh iBҜ"fxKץ׏fxHb.Venuuh{lGWmv$ahRˆZ6ٺۆN׍ww`Gk-χdѸϼϜ'|/%xuGpM[O%jjtRf>Gp)+꽁O dj#{CZp0jEA3·tYIULDl< k$3dw:ڞ;=hw{Y02 I[3HR}b\<4Ӊ (^i0YnޅVۅ٧)mO{ۗχ;>(dK#i#h9$]]VHs = <05r^UQQ1! >Ng˧R, HIH2-S܀N`-sVgi6JN'cUkcJjX۰gS]wgW % ܈>ERreg= DFQ.sIq Dd I3"q6&KYrAЃEmD \*Rː(#Tmd&ndUaaq(X{,<*͙_f|Y=г8ͺѠ h8pB%6+B`:KE4Nj(a#7DN6g!1).bK'a(^HIH6!dODdg<*orr|%nĆ8'`k͎SAmѣv`& hwDv%eJ 搃'f)ocB2S00LWLl(Dp+rEVth kh  G,Xubj!Tj֨\t:LXT8eeD="mƓa K.)&-:o"Q*O6\ɇAA31gdqZI\&KB)4hb$KZ(+* GUg7"~pq\:%_gYr(.ʸz\qqیG0+`0gBdK~d"Vi= '291pT8me<˼g?.ŃB %]T*f$ HJ̅7"(IDA q4y=G=^&HMvӅU˃J,RGjrLz.(e]CˆD2nyNRvd'O(IWszwmѡ#s,ȮH+0:hDG91fG!3ˏ,gYiI >$[8 9yd*Iʾj<|2Ư 'yZr*ptB(<"2V$jNCi}&R:N3Fzd٦&zO 6BF5YL*xFi ̞ ^.%C4|\Th(HXm`;u!@{\Y gsdc Y'nx]ǔ`Z\NutKnst(ȉyFMd15O9SXBVJ` Lױұ)3е_uNJL ݁O4dgKB%7 朂BFef90m y*neUì1)0El3ȘdEN )#9f5׻66дgcmÕAM+! YQ ɧ3QMU4\(fGjpGoCp4401dL@f y:\r.hdWDY5q k4E[9jM">jg`S! BR+Q1hCaI''u܂DxD@11r%Md㒡LRB +Aq M[c|2LRW r`R׺][# m^f(~m1D9˹[g 滅_sVWz"2-}W߽ZxJ^@1vi(0~8c)hz1q|Vpu\iW)R\K99E+a$+2z.R7Ĺ~՛oo.M-/G$< -&ҵMlzӽv\?/jX~JkWk߀k 󱮞WߵK`ۜ^`I/ d4Xr1 }nqd^h6.Zz>\.OW'?%>j I~L+0^Ig5${7_5$e}fi|H,͜PI6K*LȃnLI۹)q(s=>$dE@ryd<,)0d*fg-a;).y/g.8vI&8n<,3z֞y=ܼEAǓ-a2 F/Ϋfe.[Uauw '2 i&#\eղI4(Ҿy>XΨJ4& ]6Xt=zQz#=$Fc !^b('t:gU)ϸ{geN=תõθ@2BuI79މc/c;,כO-mTi^y]M.kG'џmҾ3*FɅl X(itL[TX=cOJda]g6ϔB9E{60 V,RJA+H|I8ˀrBeO,νˣ)"{`J?FC&l0x>_F?ψX}ol\|~&[• t"קUZ3P `(iF,e..9C  )9.:[l 7kc2Md43aq$,WU[甔O%RA9AcV M)Km&AzQ9( ٣%$8%'tOt4_ΈUu4y_puۻ)-]m?Ɨ3lVǟ^8 ;IC"'Z--sxd:z鄞y^aL$uJdg$ c619 m!Sy ]qT[[s8Ξ9G\ n|6$2CcAk$g0/]`wW=o&}G P.Ej[vC- 5,!+ ^xntMGtƵO*焖lEL"PwEX1tV:r-fz0~Htt3*nϤiU 32fMTT.n^+%z'$b4 zwּ\g..ZMYf0Nm@p+nh*l}l~?]*%|#~; 3"]?ސeg4MOKӮ k5 bZqgI^%UTT7+sWSҮaӜHQ*6#?te~KaTo.Eo"u1%]moG+?b3qpNp\. _6kjHCɖ߯zfH")6`["鮧N O(h$ȳ'Omd yօB1w/c]e$hhl%&?lq17G=5=u|_7!%Pkci<'HO\ *Owڔyx]Ӿ}Ծ ~W`S<$LΥ{fzKm7Lj6UG}+ѧɈ&`3{FtlzgSE7#Gy  ^10*cPDS M{ Mzw;En`:,9ƺ(t漢1 qDqa'7hX)Ql=' ZDl 0CJf/hLqK}}2~|pv=S:1lΩ7_Xc.2<֐UӖY&12J成 D (l*f4M(jT"{ "+/P,Gv(UDkaKD 6@8ZBcks;l/vOnGJn!RщEכb/OO3rt b}3 _Iq;J | L o6zw+,xwnЛ[#cB㰁F[8q,x) 횮m[!/7swu1}w`v~ѽKs 5w^@LGu w?8hOnaD<؈S~;8x!d:\:b7+)91}v#A!rFV 傡T8%r'6Vޜ첻v~/`E2PcɅl ,T@JfYn[5g`1VYE 62( F刊`) @ (UY]Y_OY\s׫.f,7ŲiY-K?O`.N|_}ؖWIckTFJjt.pjWIJB \B"zN7Usj}P klZ7#-ɻ08h*e,WX1te0XLjAQgdр ɳhEMkh(U/9Ñ(6R\ȋ%xoGT;a"l`:M=<+3¸jl\|K +?U=#Nk|60 rt60Ž$rհӯb\%$:c$=\WX3+P}6p\*I+{T~p%a* J wJRJQ•H,4j&fZe o6I}_ ?z)IAl'}%GD3W1O]4J%B9ޢo7K>ԌOS鐭Tc+B\ejr)4ff*D sˆ1+CJ(Fî9 B`n-G^@, &/ |)%_JɗR|)% rriJ)¥JRJ/K)RJi,>XIq5:P$T}?, ak<,ZrYJ[r|)%_JɗR|)%_JɗRc|)/K)RJ/K)RJ/K)RJ/s-KR5ޣA'BueไiŮF25%7iQh 5T(¿xeXї://}^F_LP,`8I`B"0 qsI QRQĐuNƬr 3ߛbtdXDcZ)"V_"D4vgg;zB}:7#qC8𪆕9OeC0e #)3;/ў(MPQʤTm3! 
+xH/f:XIP(bJG*HQVySNqy_6:zSLXc"3.E,rZqgQń@rV`SFz=] ):9 Ȍ{,ޒȜOA{TymHIxm.ͬ!ktTc &JV j(t.p.E#t#COĨߎ%{" XuJ604r7=LZkJ.`u=gλz^jy=@y?]<E][:|`Ҝt{L:33m^6_m{8%?L&0ضGߒOslm sPDx+ _%HC -^ޝ^8"AH 5Z f"e$Z8 j`k96x%Wnɾt+sǑph8{j @tB NE΃FJ,՘zL lRR"aI,HJ˴XD|0a%8UaaR"r"%vE MH&0}w:@?;>:Yԣ=QiIS~?)݆fw@UrVTޞjIU %G,pQ#XĠx \ZOdPG @Op).}Ԗxfe0N qlXme싅$3₅G•ӜMbk Co耮je?ֳ5Bi$!;)G9@T F"@N$2FB"3L^ĦF!{D` O %hHHLBDNAŹَvJ;1 eŌ~+8d[njd&ѬRP@Df"DV|a%pr˽Cj۷=^5ۚ=o0mheiОp\8$GCE!% H% KlH=piԚ:\h8AxgKP7Z=Mђض5ri3,i.Uq?ZL I,Ӱ~ ۛ /|49?1-,iY p!ڜ(J00GLF1иHD/ ِ"U"gsckkaA׶T}yXiq%OAhftI*H {T#GC2Aq9@ u7m'{(? 93E c>F=n0뽖 @4iѶ:y"*C2L3}RELJQ~?}~^Ss弖ZrD^2,X0MM =#HF$gB4qMLb Efi@9 RAwt: gk>YvHn牋hD_> ~?%7O73b6<Wһ l*~8Reo|S,VvoWo|Zdz 6"[0^;'_2{K]jܙ6{$jUor-?0fP,&<ÌףT}LƃNNޝ>2.fr`ͿO] >nrvɧqah=o6]6; ܆.rlfر~JTb~Q?xmkTnѻpSdPm7>}[hhQZ?1 ԕŅX.{VrɝeEurr]ҋWUr%^-&MUkYp{u}g܇ea%Cr%,mLڍqsK6PoaS]oXՊՕ[æ?磀øҫOU-*iXqyuYs^KLg5~NWKC,2Hj%m7O?V+t2)^cʜFL.``-?.aqB{r:c9>V+B4F"XxzK>J.Jg@:ByiM,FFlzǵQ&E@Y='L0` B[kb!ms_0kD[Ԫ>i,)D4ǻ^N)E%B$! cS8%ctN<‰N8^|D$! i dE·#'4i$Zb!^DKiڟv Q:*S 8J cJfc;aT@E 4 !Tv΀$(PGͧXg!D)oJ ј >[# 8tYV>ɩFh gOF=H%^E"p5& %LAhWB,a)F{鶯28C*~^8!PDOPN"TFʔ׊c) 423d: L[ k;6L{*ǜ`T.su̳|L??`Nɫۜe1Ǒ߷_=*p6@r&LVa%9K_JI;vu'@y2mк{mwjy9,?4an Pw-eC|ٯQu2*?߼-j*(fgMd2QVJ<'h{ξj27^N'W(MB:So Z~!qǸ# \Aa^-k({%MB8 b*AK3=ҖÒzOBgc!-:C}Fz@@sJъXbPGQr4xk"&DHFE+=h9oTvE& zѲ<>js ([hU8te =b09֡-]S__ǓOR߷l_vÓ_?L8M[F\ڝfeV*ܲnl{i CK`2l2F˪LG5pia"!mq߫.@ qKod>`5:z+UJ.$Q@rP@8~G4 Ešc4Ha6j͙D,gET&# Βֲ`h{wIIx{m6nfɇ#cu;\]Yv`ܾ۫"|[ ҩՎ&i+)KV$OV]u~툱n'ϞQؤvI;lpO $|=yKjB[s}k\;aY7U<{_ !+Cw mn& -KmuUb6 a+oT(RVDE Y|7"H%K4' \FHxU{}.M6OQq4O} <ΫoV8܋25\ %eBAv>Cak V7gSqt3iI4|v(2s bI>'/nrg4ͮ9Ѵ)j!-Fe0LY%&*rdQ2ewRky0혣?ȍY^)!B1bFCAn/1XQDBxfZ9WX>p; lBy%'. KzZǪNp< %f%ٔv&)  $ ui$u9g&*~3if?O&: u%`{%d_$exx-A4203T.LBmbK[(Mεw׎Ƴ;,s6 r:v.x`xXA1@ugʰL5gV-3+&}d,xiםfg"`}=45;g߾Tc{;3MH34W1Z ,r! egRC򽧥cA jx#o@t TUoLMY(֌ CjuTʷD{׊+0S Z[{J.$v^VuupUkRzi߈~\5̶nDp6~>)\~l ppB!P8- (]re] ͺvZv&Ӟ9؁Ccz|NEmМ7\ & p)4.Bn Mوb0錧@}DjD=2"Q8!('UȹO})dFU[ϵDkocbsfb|>Fp JS+1D0ä\zu;f)nh`emeXu l$Sd<٧ܮw ǩ@ JGdDB(``KBl`;PX Aѐ ,,B9ÝI\.O}B:}u\{;sqOZ?זh;QT8QFƉ2eD$Naq{un0Y^]$ܥP{bh3:d(. ƥDF1 I3I .8{Ti]}5 kuauf}Eo>+%K۵8˃yV%8h1*T\Q'?ٯC1G, ~3Ael1Ǒ+߷_=ZMKmqT\0G()S#e[BgA҄?WiT&݆-x O7oMD5oUz@fg=/߼Tweq#YWc޺(žhT7Հ]rj%T"Y S]tʋL'"n8Ȥeh/48("k*x?ˣl&f~~Iӫ=^] -BONWi0\C)xJ@WckMn]%.C-ttJ() H&rtG8ٳx;p&TT82S:Ϧ5o}0ٮ7@]5;xyV^-3a(JqbVWi~7;lt^3G}f ?@?猪rY̙&\v2p"h/4~&fLpEk~B[pzCτzA7~IRxٝ*j luRcfy11rqYB#9aW^! 2C̞9+8=E|f2NZD W47J":>CyUd[*tPNU#] -G]%5tОj0 "#]IT-+Q{*ŭUSWHW jt(7^DWVXrJ(+ЕB69XRJpYkB mgDBM/VL~f o17KN0D0e#J@Wce n ]%5tвUBuGWgHWu*5t Jhm:]%vtut%Ӈ^y{|}<60\qXt_޽`7*/xs=:2&W;0<+dB-4/0S<;kҹVpb#V"Mɧ2ّ &c]?zSa{2r/LrJ͙"=iEvpnJx>3]_PYPS|2 0rəu<1a5*JU~~ywyA ^.xD:}Z܀[?a~[n|d7w?{OAJhhKiꈰxóqOӚM|MW>ru{IJV+Tmqÿ9Q/^]^rQ /1J038՞P8FV:h`/pn55iɒF Z8>_]~.5FtkCR Ec0BXYL^jʈhA #&`pH^S)$HRd80(zda4xr4o Ҹ0bw܆ේv|it0I0{蟓v}?OowW-6]L#Mr_ܴ;iQkuZL jMG}dWd {3"e|Q ݼ74 O~mX :h1z96Y24!l%{L 2ec"{xeD)1I D@XZYh6܉k/TW:33Qє/7J. g7@Ђ6U?nu-xEqUejC<3:R,m;_\$:sb>[xaa=l`; (l8'_!&+y1Sh'wx6^:D)yWٯBdr7D^ _KBc4NW L{t  AfcdEIE3&Bp W z歹law7 \ptBΕeNFsοLb +6*6([8Sߨp(Ek~QTcDjX2$9lL|֚fLn Y3O"C.hE0f 7}V*U3}ǧ뼵tޙzkPexfVi&yYngEŮx9kmߜȿ! Vsk5 ?>[jpxL˓8gF)sRzou`r*6[c[zLkF. ;<XN4{_ -!H1q4Xr\ZeTJ(Xiz؂Q4*kc&PfTetۨQ1,%! N25kuЏG$nZkm:EWn) r_y',)fΎ̋}Q/A1"gזze({MgpQRaUcoZ-(MqKuLTWQ0 \!/Sa*gco_/4GښCe&Ȑ/݆?Lﳇ0&.@>m"VJ.x}:ojm~ jW\}|bgkftڤ2`T¼ ׈~Bq)UgeCWѕ֡TTWb׸B!x@Baӌě> uHI ? hȿ~豒ڡS.E"76HZDD7ԉAYe< |#5\QRN)3)kkk cR1w8M(rn`E#a rX+7B"^RaL`|5)vey8OGu$HwsE2 f]⣓'\XWɶh5j % xgxL'WfGAe 0"|.௜yrM`FSTt;o!ljQٜĎR7LC CIXz[nb1!0#b P7,C0! 
8Va S띱Vc&ye4zl1[!-5{õszi[;x_ފ)C 8:fOn8BB ֺ*6S>g&Qnx9mZ8z xû :it<|J:%<³Cq:22 ۄENs,ʙ(WBJ`898~P/&l?qͪ\-;LD65铫84к(DgC;eͤT?>DQ^ZNd4ћ(@cV)1S(Kѓ#.TeNmbsI:9vRNcpQMʼnE=9(_,Y>bdX6A`f=nmp< Vn7~OmF{$7,^P֞YW:*Vh!tIBߙٞuFAw෋)HC)u^ޞ=T`#E~n"CHC|fgzL;Ya+)CZ'9ߗe恐wJe#Q߫._('NQ]9/<-ٳL-~&bsŪJ8Ā"W_O/2 6!53Gr܂(PC4e㖣-GQE,Xh)#k{,%\u#p] ݤ)SCV>Է_ڣ{Uk06OhRhRLzSNq飶ěU(+q@Ru3cmpv3c}Z6.2BR3 _ Wfs&Ӥ7 X3=~/+L~'?8ckBAIBwڪ68p'&bl #f2"iLj#n=D&W!*YmnHOšUKeksTrw!rK5M$%[~=HAQhB2AgFOw"Jh,},]7aNcb-e0 2E  A/ױ$XC@Nv:&N} Yw DK֚xR=정 RbA{= p^I 6CE!% H% Kl7/{05u18q{c89"0i5noQ=Mђض5rڴC8= sWx/at=G6EMǪU Lk-y B4NPD2VH܋5% ,Z3owcPȸ0*!fO'Q/kº KK TAm[3'R0$gG',U$4 $?ۿִWٔp#\9V4{ir5"ю/SrŒa,f&J^ZCb#3!Q&1Erobh"rJLSK)h m`Rw+>2Ӷoi"tlqy0GF3Q*fË1>5L-Oh^ݭi\ߨVY|f{]m gkٛ"]D|w1`wvW(*{Knnx 31TJF]o]{h)p?:e$(0,շGxc[hyw#Z;1 +X;)]x Vl9^βr:??:[TH)iE.-:=FK[ ە(Q|_ fЕnZ"Yzw.{uu9d܇sβpC.}nRϗ.ʼȇҺF::kx+uʉ-Ks0qwp1Q.EJ>UMija'$EEk:qe?lR*~%YB>[vNnbev'V^!ɵ"Zl9ØDU2hj &Q'^bҮBE)=tP]&"3:Du^ZFѠqmeTDIsdV0`~}ՙ\ mā'BVq"PWS3u@'È6É>4ADh1zbQj!phe$ FcJEr-V^G|gAE¼ ۟,w E='=yd9VOVUTF@ 3l- '/JM-ԥij΁3̫=j2|v[ n=` {o=9)~4bS< wA+* emSsИJ{D@`B hG)'<#-:_khjW| N1qc( Dh,@{Ђo[1 B2*Z驎7ѧ|8eQ`Yo.WU&:b= FK(6 w`4Nr6}MK_ߏ' ΜL?w}v;_q4cZ׊Z FvVf#!- GCZ((N/չV #}Y["W-.@98GHLlNk0=zQ>c={ΐrV!:2D!h1QFe:%*EKZy=jkWuJ]e2b~W]=mnz'z0+P.OjxO3xWi+7ZٕܐxU|9y3^~N89?dz$ G' _r)<\ɿFǥvFFYq墤L.ParVSk,+Ϋ ٽSBB ?n_76 ކZMrZxnfuCu>T>]E;*wB-*̯Y|28:];ȡlLwzz8\O^M'7S~xYR-%f>`* *W0a"$3U=Bgkd.3Z"F$I;bN$JENۨ9^ YR-ytrSŔ!  }CAhxz?̇Vs_6^m[>/WNB `5:z+UJzT(B 9g(X MEjD"9JfZs&Q89˵P<ՈdYrvlkGBdx7}of~谻|bw7Yڛ)»E4g|S,R^ Sȹ8,z4*qָX;^x&eUO5ϋ<(tyG'6%T< JV:HM9tQtyZ<0|?ى5dz<R@svm8 SZ* h{+JjB[K}kRLRXסE? =Dԣ@Qۖ?L 6"[Km(,u]b6 a6"h)QB%5 K.΅!PimփxGϩj@zBKb%`;](Ukvpd5.% <6/21 DK'PeD%Kb)߄0? }-F4ӆWr*4QsC("jPGY)mӦaW#7B yňOGcy*Mx;zic2|$pנyAG[Bx|{H=Œwbײ1}ǝ|`O8ٔ ')  7il>eI~״ mc,.L)G\ IR6[GZ8o'nS\Gځ1vPT+gx#]0txSkvu$"aխfeHa=gV]dznwha/,x4NY13mx0z͡z;g^xFg=2&|VƙlJ N?3I)B!Ҋ|ֱ "Z<'@t;&Z(ߘ7^{Q8PF3T( )w&J90S)[SKnuI".켮pBeRNi'p8MÔ.}.<ͧg:"md(%Y4 XЀZ%>ahЬk[t*Z?n=IY⹍QK!r]D\.!!N3@ 11Hg<#'ˈFᄌ>pJF *ZO}d^ilJZۦ95|>FpAGh]0U6U|&FóE0)nh`k6񂊈W2:bqmI@ȓ-ӯYsSE*L A>C*PP8"(xiNP:W/A\[/, !k$ !22lq>d4 HB'g<1rLPIIr}7j!*[sk=R;֖RT(FHhNEDh h(pc1D%T0 CR* CS:& QʛDiҙM4&ȠOF, X]OArQƁq8P2ACT$"f 5LA7[9YC9E\MUspDlHrgVVZ{7,j?<-ܗY&˲U1<>HRD[aNଯ:<&ECtW lۋVPY_]Ffoīl6}\e@d]Ͱ#3܁a?ƠfcP,B|hxƠPMZ6=A澱 Nǎ+h\Amq#e/:A\I@J 4R̳JxR!,:A\#pWki\hW2W'$LŠ N̂+UkW-zpEzza ]$Ծ\ԊWLGvE;\=uIDRʕ0 Tmrǎ+\ 7뽕znV.{8Uk:l|Fo5Ao./}*oZ*<*~9 #a<~%[ט/6Js3W^^_]{]Ç!4pkՙlk= N{;1O_\ҏ˿?}G3:Xq5iU|Ia^dQNAQ(?4#J`Iìse9Umyj9#ƥed?n &j)ݮؙJyUcd9$|@QNS@34 vq\\W6JU /:A\ٽN&• N!&ci\Zǎ+U):E\I(3bd9Ġr4'MZJUqʹԮ<ŠWs[.:A\ҘW*8sJMӜbTQ,WCo_I|MImXo= \pe\=uљD`{&Njǎ+UIi)Jxf*RWo.1f2zU~{-YE'O1$nVa}uXnyjUK=էe>7ɟ*Wxj9 d?O0:&w|{Z%E݄}%o71auw*׺Y0j]v YK4 7 ϸ abi*ENFx\`NqLqE]gKz nsi"LCp4 !78 Uȱc]4L =H= L+g>WPힼ s4~"\NR2YPu2rXq%,x"\x8 ѯ٩Jٝ""Up΃+bP{+JUFYpuJz70Tpt9j ǎ+UI \#_CNa76zLGvfWCO>`'y1u< Tǎ+U)qKW.M+,RW2W+q'ŏd݂qr1'K@z\PH5#_]ڿ;*߼ Uȧc.=2мQx;ظs4= |a5~jvsE2_= ^zz,'I)ĭg`}H\vU8SqYK:E'rJL+Y>֕ǕdqS*Ef; קip(f'ЂS՟Vb<3_JwD&HQrU F"NhI(ɗ ņjѵѥx@^y K-b]rpwH& w܍)aN"=X<&3qX 9| }JcGĕ1Zr6A}>ф`v^;tۖ&SKCCcM!:͖J*`?ɟ !R;ʻƈ2jY|5~kb1J gHD`'~W?6Dޡ:Dy%S>R8~JHe=xnn@:*ep-܇xOH%5ShR4#@n$*Q3$|1ÚnN'"Nؓ9fqu$~b滌mjJKo%}5TP )E 9KFcqкIȆ&%xX' E6Mr>dk HN@#'-U29;H(2!b :`=KE Ȏў;z6D@N Lg& e_hZ%MlR[ EGG< ^ 4s0Ԁ ow[N^ ‰rқCPBӮėE@mbGu0cm̭a\ l{:.t-c6tώ&lGCVj:z@.vXIi1 [-PPc^@n78(Z2DpֺM,>PLQ)v-hߜqj Vp2)lPQ&t$AQQ} 껡L8Mj &6`FDUgdXrktu'ە'P1r3kig2aۚo}Q>: dAPBdE"iLa̾ZS X-[Kn9 N0t掉\ U:T,&Mppg# U .~3@friPA`:jonLEwo %RpiUfx0{Y6x'D)J6􅲎()b_WW ]9D5ڽWSRE>B[Ə 0J%V]o9#[-Gࠄrvb3l!! 
Lbd dcAU+cG]bAԙ1Ozkujz .M3IW'$'ϧc*.t"B!&$̿31a=t&XՋ77z˚|u'Z ZŬF]] H6#&P 3 f.\#\~:d.qXC1;6*l`VPĀddX/(xbQBE򠷒a$RQd"5F 8rtiX~9=K A,b3,k\uU;do(O}L,:٫pt5ڞwt l;V+.$c)ߛwyvR)\u[0t6FD4=$䷷^AHMuj KRX XmLCQkiYۚ$gT ]\ƨwl 䁈:(@:zxWinwXV߫g @i/dS(DΈ@!25v'"X^!32FJK)2~؃`@Q 8xPf#bEث 0٠|,8+lA A1.'jj[2Z?+{YtgWOjdg h@eVo Pʭ ޚ^I"čba-#͸DP}osёf nfoyMms( %DכU ~/Vô7痯sV&]́`0^o]9xl\I6 a[Q >;Z<,Fݚcޕu+ٿ"^W iyK3i4^/:J.,ʸOgPlv;)ӯ C,~)%p|7i)i6w&˾}^Ԧnkm~Cpul:m);.C u{0#mׂ|ByAJ 0]Щ2z')@`%@_H;WQ J T@B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*P J T@z@{ ? ~n8'OC[aPFm_?oWXF UϷvUq \:?v軅u3<7sŋˤiɯT3*eKL k(ہ h¹f7>_&zkJ6ԧ~['7*4W'4ɆB ͷVFNys^34\`+o?J|f~ݭ .iwRD}c2@8gQ?_]_T468n]q]GP ?S<\p {_~y_ |P]IE q=8;^Ͷ5a ./*MO-s4 99:f$JDɺNRgN5/ bKTǥbqYˉVƬ)}_eF|Y\1B+L?v }BYw_kz76X}0ҿ:_MuS>x8cYN?ب#kݔtwf}Nd8[Λµ`t7~Y8ɱĨ}]/bXKR9mЮlN#KyI;%!VG[Ax!/zQUcL9c &ޅ 0so'OV@(&ͮw}Ha) <ka2E3E:&.=5FQn#k:kBQqW3ފq<gyW.v|u5p߬N5U5e5˟Zo˾*&V]=?ܾQm')ZfW |!f9vAto'LwMnQKo2Kj)` s2rOAC8zD+<t{?]F8w//Q`Zzկ=9mzDE(a WpXoj&7[ a.xn'1ђ?Sol0dۀS="42?K*E{oC#qx]4}4~?Z5{|? WpmPkkީ!KA*ť$r2$b`$\Ug$\4xט$lp@l pF#eL]f5Eh)rc1D6>rI9)w ɗKIha|6@$R9W1>RU^+g.9UWߟ9W/!rcxʊt/u -#mI {$ɟ/jcedȜbV_~G#ۡ1(2u,FkC̬:*}dVhK}d!Ae9>J!4MFrjՔIL{IɉySUdX㶤Ȱ9G't*Q.r6)䜑$Dr|9JDtRPNuBE6!ڽ42-CzFa|]z2r&1m#tTY\er =8k*ʧw&.T9,KI#T9(pr鄿N1f|a'Yl ?yt  8.tb>v%[ mg~i![J,upfPLفY5[;Wɝȴd),hys\=G(e(J)x_׀luܤ8(gUp@κnn+kw)Q>#[trCy@5nRRo iO1x.f.f{yyc%4q09m.A.C֑E M:v{MaΊuͭ+fѥ 0n{{МBÁߚmfo6so=}ˇ oQڛ:.ot <N1<(xH9-ķȬ,(Ip-_g'm4g$m4ggC .ρVH<1NF= ))jS!6kg:2~xw9q0̾;qZ y 0 Z"&Kʹ3-,qԕt i*9ntCQ˜5[K'ϝyR_O6Vɑ+I m 8}% -}gLY9|X+yPYfK$a,rPeHGڣ"|~_HxnGԹ}(5w>5}[(hOrnG{9'l .(,%58Z=QKhR;Q)ڞ&2AJC]4hOO.W&̡2 JJTbS.W]4LjM]J1p{c89IĤ0y@4GKbjDU8_,!Gh]Rw~sVAytZp[6~IgY1e9H•JhUo"@Q9b5% Fb1PȆF1S1MM4Ѵ8rYiZKht9PD6VH5FDr`fR\:| Yu(pwiKl2qn ֣_XBX\*mt<!zOXHiB4J"?}55u{h0pkmH O]WCq7H{F~F?%hRPW=3|I"%Ci$8LLuwUuUuS$>^h0& i͘L{t>M4U ,HF$}4qtH>EQESeJF* ֲJ٘nύ`ΎV.-+1 :]_s1I<CT#)(dO&sgk]đ-srNO0M4Ig7h̯'7o|߇,/ͽ)(Fwqƫqg$gh 6P|7|yA3wњ0*~u]?tPd،QaFqQjnǹj{QV|6.F\VОOY^˸6sdϛU,>;/p:9ӏK+-* K?U-t1;m.]nt9zvB2#ٟ<|X鍷诗/y .m4W| |"#V=尔El֚-ҹ p|z|P@!(")\Wx al'|Q[zvàoAC>fQddQ\jsW|o}stWBp|xȴՍqۑۥ6AvtE񰁗).o.Ἧ?F;CM?8[s 㹕^},9!E8](2LKl 7Puʸ\eO"K#Yh n~,9t2)v9B̝Eky:/c 䜱1IxY&-8'c$$F.4A$ 'R=vc8-࢒?+#A9chb!htbPY I3t QD֐0aje`);Y-oDȮ}: iD:Ӊn1$I"⸘gT<; ϧxv&Ws)մų3ųFp +=<BYq\h -d>Vx# 5/%pֱÜ ڧb6+fG-9$ɍ! SQĘgxl[jNqv(Cznl $ws^I$7ѼksNLIExb-rs/w] u }GT]D"5S`{Sݑ)fZHM|*48!`p&OZB!EJ)ǜBm1D&cZn[v9 (EQj"BXC#w&QS ZB%pc(R(W%,8Xnޠ1 wq0U `ׄ|W8ȡɐozn~'_2 sûӜ^9z@gW}Z*om 3/,b֬rGU{DIzq/?+#܂E'W --_о.9[t+vh{YsYy@l ڏm*4#IYtQ AdJŹw \`6Go5z0'&xբԥPꎹs ݆Mz}n5z~6̧:_~n|{lv<^ яU4%=^l9LZ@]bR& zpy:nzx!Mϔvń ΀RR2! DF GDQѪbJ$|sBD #438NrQ+;GZ2"YZkk EnN _hA6_}3Exh>S _z W<pkuјA8  +\l3p(׆rOA9+vd:; ٨zzLOd*e Get%T< JI%W8<\YA$UyZ<\~cPXs<{, &e4"(ͩ.'R G\WInt/I}'!;#dGC6"AFQ۞M 6"[PYF7_7r쥫qg: JdĴ)FTX$MrsTiE8W ߫]xDJ!*)wFU_h'&$j%p*WgGSyC׀^ Tg$sMJ58X$A-P/]Hf|p2KfA# aP3_gǃ~YlMtçu5k> bT%u$2ϊYt>~Y2ewRi6Qӟ|dDZPE^J!|1bZC'An'1X|y\^L3-Luk=; 6O(U`Bȃ'Oǜԃ0J,i _ qKofJܞQ%Yv(vsze%xš$5ma;иoƝ[i}۱ٸm9O6 L 77 Pn^V2l'n/k6e잎Ֆ+;*VLDс>kR'/OQEYd&ݤq9jM#RE @'_ݕ<ƠCZQz}:):o۾ n>05-څUN暣O!Oxy^B4mR^Mj\ ˟n:&JKQBށB#PX yQk1Ac9C!C hY7$ q܄"VDo6 C! 
var/home/core/zuul-output/logs/kubelet.log0000644000000000000000003544276115135262075017713 0ustar rootroot
Jan 25 00:06:29 crc systemd[1]: Starting Kubernetes Kubelet...
Jan 25 00:06:29 crc restorecon[4689]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c574,c582 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 25 
00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c440,c975 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c4,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc 
restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 
Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c968,c969 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 
25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 25 00:06:29 
crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 25 00:06:29 
crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized 
by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c377,c642 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 
00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Jan 25 00:06:29 crc 
restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 
00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 
00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc 
restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 25 00:06:29 crc restorecon[4689]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 25 00:06:29 crc restorecon[4689]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Jan 25 00:06:29 crc restorecon[4689]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Jan 25 00:06:30 crc kubenswrapper[4985]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 25 00:06:30 crc kubenswrapper[4985]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Jan 25 00:06:30 crc kubenswrapper[4985]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 25 00:06:30 crc kubenswrapper[4985]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Jan 25 00:06:30 crc kubenswrapper[4985]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Jan 25 00:06:30 crc kubenswrapper[4985]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.077417 4985 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.080186 4985 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.080256 4985 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.080301 4985 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.080343 4985 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.080384 4985 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.080424 4985 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.080464 4985 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.080518 4985 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.080562 4985 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.080603 4985 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.080905 4985 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.080964 4985 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.081011 4985 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.081052 4985 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.081093 4985 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.081161 4985 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.081218 4985 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.081263 4985 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.081311 4985 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
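The deprecated-flag warnings above all point to the same remedy: carry those settings in the KubeletConfiguration file named by --config (logged further down in the flag dump as /etc/kubernetes/kubelet.conf). A minimal sketch of the equivalent stanzas follows; it is illustrative rather than the actual file from this host, its values are copied from the flag dump later in this startup sequence, and the unix:// scheme on the CRI endpoint is an assumption. --minimum-container-ttl-duration maps to the evictionHard/evictionSoft settings instead, and --pod-infra-container-image has no config-file equivalent, as its own warning notes.

# Illustrative KubeletConfiguration sketch (assumed, not the real /etc/kubernetes/kubelet.conf)
apiVersion: kubelet.config.k8s.io/v1beta1
kind: KubeletConfiguration
containerRuntimeEndpoint: unix:///var/run/crio/crio.sock      # replaces --container-runtime-endpoint (unix:// prefix assumed)
volumePluginDir: /etc/kubernetes/kubelet-plugins/volume/exec  # replaces --volume-plugin-dir
registerWithTaints:                                           # replaces --register-with-taints
- key: node-role.kubernetes.io/master
  effect: NoSchedule
systemReserved:                                               # replaces --system-reserved
  cpu: 200m
  memory: 350Mi
  ephemeral-storage: 350Mi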
Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.081356 4985 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.081397 4985 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.081438 4985 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.081480 4985 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.081525 4985 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.081573 4985 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.081619 4985 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.081661 4985 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.081702 4985 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.081742 4985 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.081782 4985 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.081822 4985 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.082046 4985 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.082092 4985 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.082163 4985 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.082209 4985 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.082250 4985 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.082290 4985 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.082332 4985 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.082379 4985 feature_gate.go:330] unrecognized feature gate: Example Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.082484 4985 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.082529 4985 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.082584 4985 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.082629 4985 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.082675 4985 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.082718 4985 feature_gate.go:330] unrecognized feature 
gate: ImageStreamImportMode Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.082759 4985 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.082804 4985 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.082845 4985 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.082891 4985 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.082938 4985 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.082980 4985 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.083023 4985 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.083063 4985 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.083114 4985 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.083180 4985 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.083221 4985 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.083261 4985 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.083301 4985 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.083353 4985 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.083396 4985 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.083440 4985 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.083486 4985 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.083544 4985 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.083585 4985 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.083625 4985 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.083669 4985 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.083710 4985 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.083767 4985 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.083818 4985 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.083864 4985 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.083905 4985 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.084151 4985 flags.go:64] FLAG: --address="0.0.0.0" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.084213 4985 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.084262 4985 flags.go:64] FLAG: --anonymous-auth="true" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.084319 4985 flags.go:64] FLAG: --application-metrics-count-limit="100" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.084366 4985 flags.go:64] FLAG: --authentication-token-webhook="false" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.084412 4985 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.084462 4985 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.084506 4985 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.084552 4985 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.084596 4985 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.084652 4985 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.084697 4985 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.084744 4985 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.084788 4985 flags.go:64] FLAG: --cgroup-root="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.084830 4985 flags.go:64] FLAG: --cgroups-per-qos="true" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.084872 4985 flags.go:64] FLAG: --client-ca-file="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.084918 4985 flags.go:64] FLAG: --cloud-config="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.084965 4985 flags.go:64] FLAG: --cloud-provider="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.085008 4985 flags.go:64] FLAG: --cluster-dns="[]" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.085059 4985 flags.go:64] FLAG: --cluster-domain="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.085107 4985 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.085174 4985 flags.go:64] FLAG: --config-dir="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.085219 4985 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.085261 4985 flags.go:64] FLAG: --container-log-max-files="5" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.085313 4985 flags.go:64] FLAG: --container-log-max-size="10Mi" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.085363 4985 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.085411 4985 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Jan 25 
00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.085456 4985 flags.go:64] FLAG: --containerd-namespace="k8s.io" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.085499 4985 flags.go:64] FLAG: --contention-profiling="false" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.085544 4985 flags.go:64] FLAG: --cpu-cfs-quota="true" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.085592 4985 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.085636 4985 flags.go:64] FLAG: --cpu-manager-policy="none" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.085681 4985 flags.go:64] FLAG: --cpu-manager-policy-options="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.085732 4985 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.085786 4985 flags.go:64] FLAG: --enable-controller-attach-detach="true" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.085835 4985 flags.go:64] FLAG: --enable-debugging-handlers="true" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.085878 4985 flags.go:64] FLAG: --enable-load-reader="false" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.085920 4985 flags.go:64] FLAG: --enable-server="true" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.085962 4985 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.086013 4985 flags.go:64] FLAG: --event-burst="100" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.086066 4985 flags.go:64] FLAG: --event-qps="50" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.086140 4985 flags.go:64] FLAG: --event-storage-age-limit="default=0" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.086189 4985 flags.go:64] FLAG: --event-storage-event-limit="default=0" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.086232 4985 flags.go:64] FLAG: --eviction-hard="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.086285 4985 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.086329 4985 flags.go:64] FLAG: --eviction-minimum-reclaim="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.086373 4985 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.086416 4985 flags.go:64] FLAG: --eviction-soft="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.086464 4985 flags.go:64] FLAG: --eviction-soft-grace-period="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.086507 4985 flags.go:64] FLAG: --exit-on-lock-contention="false" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.086552 4985 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.086596 4985 flags.go:64] FLAG: --experimental-mounter-path="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.086641 4985 flags.go:64] FLAG: --fail-cgroupv1="false" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.086685 4985 flags.go:64] FLAG: --fail-swap-on="true" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.086726 4985 flags.go:64] FLAG: --feature-gates="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.086774 4985 flags.go:64] FLAG: --file-check-frequency="20s" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.086817 4985 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Jan 25 00:06:30 crc 
kubenswrapper[4985]: I0125 00:06:30.086865 4985 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.086919 4985 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.086986 4985 flags.go:64] FLAG: --healthz-port="10248" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.087035 4985 flags.go:64] FLAG: --help="false" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.087079 4985 flags.go:64] FLAG: --hostname-override="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.087144 4985 flags.go:64] FLAG: --housekeeping-interval="10s" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.087194 4985 flags.go:64] FLAG: --http-check-frequency="20s" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.087261 4985 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.087316 4985 flags.go:64] FLAG: --image-credential-provider-config="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.087359 4985 flags.go:64] FLAG: --image-gc-high-threshold="85" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.087402 4985 flags.go:64] FLAG: --image-gc-low-threshold="80" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.087445 4985 flags.go:64] FLAG: --image-service-endpoint="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.087495 4985 flags.go:64] FLAG: --kernel-memcg-notification="false" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.087559 4985 flags.go:64] FLAG: --kube-api-burst="100" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.087610 4985 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.087653 4985 flags.go:64] FLAG: --kube-api-qps="50" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.087695 4985 flags.go:64] FLAG: --kube-reserved="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.087737 4985 flags.go:64] FLAG: --kube-reserved-cgroup="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.087779 4985 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.087830 4985 flags.go:64] FLAG: --kubelet-cgroups="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.087874 4985 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.087917 4985 flags.go:64] FLAG: --lock-file="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.087960 4985 flags.go:64] FLAG: --log-cadvisor-usage="false" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.088006 4985 flags.go:64] FLAG: --log-flush-frequency="5s" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.088058 4985 flags.go:64] FLAG: --log-json-info-buffer-size="0" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.088138 4985 flags.go:64] FLAG: --log-json-split-stream="false" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.088192 4985 flags.go:64] FLAG: --log-text-info-buffer-size="0" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.088245 4985 flags.go:64] FLAG: --log-text-split-stream="false" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.088293 4985 flags.go:64] FLAG: --logging-format="text" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.088344 4985 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Jan 25 00:06:30 crc 
kubenswrapper[4985]: I0125 00:06:30.088405 4985 flags.go:64] FLAG: --make-iptables-util-chains="true" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.088468 4985 flags.go:64] FLAG: --manifest-url="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.088530 4985 flags.go:64] FLAG: --manifest-url-header="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.088592 4985 flags.go:64] FLAG: --max-housekeeping-interval="15s" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.088661 4985 flags.go:64] FLAG: --max-open-files="1000000" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.088730 4985 flags.go:64] FLAG: --max-pods="110" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.088794 4985 flags.go:64] FLAG: --maximum-dead-containers="-1" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.088861 4985 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.088923 4985 flags.go:64] FLAG: --memory-manager-policy="None" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.088986 4985 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.089048 4985 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.089142 4985 flags.go:64] FLAG: --node-ip="192.168.126.11" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.089208 4985 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.089269 4985 flags.go:64] FLAG: --node-status-max-images="50" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.089335 4985 flags.go:64] FLAG: --node-status-update-frequency="10s" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.089395 4985 flags.go:64] FLAG: --oom-score-adj="-999" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.089457 4985 flags.go:64] FLAG: --pod-cidr="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.089514 4985 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.089587 4985 flags.go:64] FLAG: --pod-manifest-path="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.089644 4985 flags.go:64] FLAG: --pod-max-pids="-1" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.089701 4985 flags.go:64] FLAG: --pods-per-core="0" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.089761 4985 flags.go:64] FLAG: --port="10250" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.089816 4985 flags.go:64] FLAG: --protect-kernel-defaults="false" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.089871 4985 flags.go:64] FLAG: --provider-id="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.089925 4985 flags.go:64] FLAG: --qos-reserved="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.089986 4985 flags.go:64] FLAG: --read-only-port="10255" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.090046 4985 flags.go:64] FLAG: --register-node="true" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.090101 4985 flags.go:64] FLAG: --register-schedulable="true" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.090241 4985 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Jan 25 00:06:30 crc 
kubenswrapper[4985]: I0125 00:06:30.090311 4985 flags.go:64] FLAG: --registry-burst="10" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.090366 4985 flags.go:64] FLAG: --registry-qps="5" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.090420 4985 flags.go:64] FLAG: --reserved-cpus="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.090485 4985 flags.go:64] FLAG: --reserved-memory="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.090548 4985 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.090604 4985 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.090664 4985 flags.go:64] FLAG: --rotate-certificates="false" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.090724 4985 flags.go:64] FLAG: --rotate-server-certificates="false" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.090788 4985 flags.go:64] FLAG: --runonce="false" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.090843 4985 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.090903 4985 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.090959 4985 flags.go:64] FLAG: --seccomp-default="false" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.091014 4985 flags.go:64] FLAG: --serialize-image-pulls="true" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.091070 4985 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.091147 4985 flags.go:64] FLAG: --storage-driver-db="cadvisor" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.091206 4985 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.091270 4985 flags.go:64] FLAG: --storage-driver-password="root" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.091331 4985 flags.go:64] FLAG: --storage-driver-secure="false" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.091436 4985 flags.go:64] FLAG: --storage-driver-table="stats" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.091499 4985 flags.go:64] FLAG: --storage-driver-user="root" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.091568 4985 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.091654 4985 flags.go:64] FLAG: --sync-frequency="1m0s" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.091719 4985 flags.go:64] FLAG: --system-cgroups="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.091768 4985 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.091816 4985 flags.go:64] FLAG: --system-reserved-cgroup="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.091869 4985 flags.go:64] FLAG: --tls-cert-file="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.091915 4985 flags.go:64] FLAG: --tls-cipher-suites="[]" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.091960 4985 flags.go:64] FLAG: --tls-min-version="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.092006 4985 flags.go:64] FLAG: --tls-private-key-file="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.092051 4985 flags.go:64] FLAG: --topology-manager-policy="none" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 
00:06:30.092112 4985 flags.go:64] FLAG: --topology-manager-policy-options="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.092201 4985 flags.go:64] FLAG: --topology-manager-scope="container" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.092277 4985 flags.go:64] FLAG: --v="2" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.092344 4985 flags.go:64] FLAG: --version="false" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.092405 4985 flags.go:64] FLAG: --vmodule="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.092472 4985 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.092540 4985 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.092785 4985 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.092860 4985 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.092930 4985 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.092997 4985 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.093061 4985 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.093146 4985 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.093207 4985 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.093275 4985 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.093322 4985 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.093376 4985 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.093440 4985 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.093496 4985 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.093547 4985 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.093595 4985 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.093641 4985 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.093686 4985 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.093741 4985 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.093784 4985 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.093825 4985 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.093868 4985 feature_gate.go:353] Setting GA feature gate 
CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.093910 4985 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.093953 4985 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.093995 4985 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.094097 4985 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.094164 4985 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.094208 4985 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.094251 4985 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.094293 4985 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.094335 4985 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.094376 4985 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.094426 4985 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.094469 4985 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.094518 4985 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.094574 4985 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.094625 4985 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.094683 4985 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.094743 4985 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.094810 4985 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.094869 4985 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.094917 4985 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.094959 4985 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.095006 4985 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.095048 4985 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.095093 4985 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.095156 4985 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.095202 4985 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.095253 4985 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.095297 4985 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.095338 4985 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.095379 4985 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.095425 4985 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.095466 4985 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.095510 4985 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.095551 4985 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.095594 4985 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.095637 4985 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.095724 4985 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.095778 4985 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.095829 4985 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.095874 4985 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.095920 4985 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.095966 4985 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.096008 4985 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.096050 4985 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.096095 4985 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.096156 4985 feature_gate.go:330] unrecognized 
feature gate: SetEIPForNLBIngressController Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.096210 4985 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.096253 4985 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.096294 4985 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.096340 4985 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.096382 4985 feature_gate.go:330] unrecognized feature gate: Example Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.096435 4985 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.110942 4985 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.110996 4985 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111169 4985 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111188 4985 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111197 4985 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111206 4985 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111215 4985 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111223 4985 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111231 4985 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111239 4985 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111247 4985 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111255 4985 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111263 4985 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111271 4985 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111278 4985 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111287 4985 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111294 4985 feature_gate.go:330] unrecognized 
feature gate: ClusterMonitoringConfig Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111303 4985 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111310 4985 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111318 4985 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111326 4985 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111333 4985 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111341 4985 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111349 4985 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111357 4985 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111380 4985 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111388 4985 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111400 4985 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111413 4985 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111422 4985 feature_gate.go:330] unrecognized feature gate: Example Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111431 4985 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111440 4985 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111450 4985 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111460 4985 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111468 4985 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111477 4985 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111485 4985 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111493 4985 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111500 4985 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111508 4985 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111516 4985 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111524 4985 feature_gate.go:330] unrecognized 
feature gate: HardwareSpeed Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111531 4985 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111539 4985 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111547 4985 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111557 4985 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111567 4985 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111576 4985 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111584 4985 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111592 4985 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111600 4985 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111607 4985 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111615 4985 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111623 4985 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111631 4985 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111639 4985 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111649 4985 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111658 4985 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111667 4985 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111675 4985 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111683 4985 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111691 4985 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111700 4985 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111707 4985 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111716 4985 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111724 4985 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111731 4985 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111739 4985 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111746 4985 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111754 4985 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111763 4985 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111773 4985 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.111783 4985 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.111795 4985 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112082 4985 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112097 4985 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112105 4985 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112136 4985 feature_gate.go:330] unrecognized feature gate: InsightsConfig Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112145 4985 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112154 4985 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112162 4985 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112171 4985 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112179 4985 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112188 4985 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112197 4985 feature_gate.go:330] unrecognized feature gate: PinnedImages Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112205 4985 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112212 4985 feature_gate.go:330] unrecognized feature gate: Example Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112220 4985 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112228 4985 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112236 4985 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112244 4985 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112252 4985 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112260 4985 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112268 4985 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112276 4985 feature_gate.go:330] unrecognized feature gate: 
InsightsConfigAPI Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112284 4985 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112294 4985 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112307 4985 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112318 4985 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112328 4985 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112337 4985 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112345 4985 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112353 4985 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112362 4985 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112370 4985 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112379 4985 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112387 4985 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112395 4985 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112402 4985 feature_gate.go:330] unrecognized feature gate: PlatformOperators Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112410 4985 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112419 4985 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112428 4985 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112436 4985 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112443 4985 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112451 4985 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112459 4985 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112468 4985 feature_gate.go:330] unrecognized feature gate: OVNObservability Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112476 4985 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112484 4985 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112491 4985 feature_gate.go:330] unrecognized feature gate: 
ManagedBootImagesAWS Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112500 4985 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112508 4985 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112515 4985 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112523 4985 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112531 4985 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112538 4985 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112546 4985 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112554 4985 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112562 4985 feature_gate.go:330] unrecognized feature gate: GatewayAPI Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112571 4985 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112578 4985 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112586 4985 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112596 4985 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112605 4985 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112613 4985 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112620 4985 feature_gate.go:330] unrecognized feature gate: SignatureStores Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112631 4985 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112640 4985 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112649 4985 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112657 4985 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112666 4985 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112674 4985 feature_gate.go:330] unrecognized feature gate: NewOLM Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112682 4985 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112691 4985 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.112698 4985 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.112709 4985 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.113235 4985 server.go:940] "Client rotation is on, will bootstrap in background" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.117363 4985 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.117524 4985 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.118326 4985 server.go:997] "Starting client certificate rotation" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.118368 4985 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.118606 4985 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-17 00:01:47.677018496 +0000 UTC Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.118863 4985 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.125298 4985 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 25 00:06:30 crc kubenswrapper[4985]: E0125 00:06:30.127968 4985 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.196:6443: connect: connection refused" logger="UnhandledError" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.128161 4985 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.140982 4985 log.go:25] "Validated CRI v1 runtime API" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.155656 4985 log.go:25] "Validated CRI v1 image API" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.157722 4985 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.159945 4985 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-01-25-00-02-24-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3] Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.159986 4985 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}] Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.187228 4985 manager.go:217] Machine: {Timestamp:2026-01-25 00:06:30.184765846 +0000 UTC m=+0.216702199 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:66207c91-b7c1-4e06-9d97-3e311fb7e34e BootID:c1c20c60-871f-4427-926e-8b5954451554 Filesystems:[{Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 
Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:1d:0d:c6 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:1d:0d:c6 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:ac:a1:83 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:e4:fd:d0 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:d4:1d:03 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:6a:81:ac Speed:-1 Mtu:1496} {Name:eth10 MacAddress:02:e5:1b:79:bd:49 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:ea:d6:ce:11:53:cb Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] 
Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None} Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.187731 4985 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available. Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.187989 4985 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:} Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.189181 4985 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.189515 4985 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[] Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.189563 4985 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2} Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.189899 4985 topology_manager.go:138] "Creating topology manager with none policy" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.189920 4985 
container_manager_linux.go:303] "Creating device plugin manager" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.190271 4985 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.190310 4985 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.190722 4985 state_mem.go:36] "Initialized new in-memory state store" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.190903 4985 server.go:1245] "Using root directory" path="/var/lib/kubelet" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.192070 4985 kubelet.go:418] "Attempting to sync node with API server" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.192142 4985 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.192186 4985 file.go:69] "Watching path" path="/etc/kubernetes/manifests" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.192207 4985 kubelet.go:324] "Adding apiserver pod source" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.192228 4985 apiserver.go:42] "Waiting for node sync before watching apiserver pods" Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.194299 4985 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.196:6443: connect: connection refused Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.194297 4985 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.196:6443: connect: connection refused Jan 25 00:06:30 crc kubenswrapper[4985]: E0125 00:06:30.194407 4985 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.196:6443: connect: connection refused" logger="UnhandledError" Jan 25 00:06:30 crc kubenswrapper[4985]: E0125 00:06:30.194442 4985 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.196:6443: connect: connection refused" logger="UnhandledError" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.194475 4985 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.194896 4985 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem". 
Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.195969 4985 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.196811 4985 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.196865 4985 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.196887 4985 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.196905 4985 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.196934 4985 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.196947 4985 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.196965 4985 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.196993 4985 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.197012 4985 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.197038 4985 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.197094 4985 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.197113 4985 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.197436 4985 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.198214 4985 server.go:1280] "Started kubelet" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.198264 4985 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.196:6443: connect: connection refused Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.198381 4985 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.198606 4985 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.199315 4985 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.199773 4985 server.go:460] "Adding debug handlers to kubelet server" Jan 25 00:06:30 crc systemd[1]: Started Kubernetes Kubelet. 
Jan 25 00:06:30 crc kubenswrapper[4985]: E0125 00:06:30.200899 4985 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.196:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188dd09a6701c11b default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-25 00:06:30.198182171 +0000 UTC m=+0.230118484,LastTimestamp:2026-01-25 00:06:30.198182171 +0000 UTC m=+0.230118484,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.202605 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.202675 4985 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.202837 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 11:38:48.087361242 +0000 UTC Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.202957 4985 volume_manager.go:287] "The desired_state_of_world populator starts" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.202972 4985 volume_manager.go:289] "Starting Kubelet Volume Manager" Jan 25 00:06:30 crc kubenswrapper[4985]: E0125 00:06:30.202920 4985 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.203177 4985 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.208643 4985 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.196:6443: connect: connection refused Jan 25 00:06:30 crc kubenswrapper[4985]: E0125 00:06:30.208944 4985 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.196:6443: connect: connection refused" logger="UnhandledError" Jan 25 00:06:30 crc kubenswrapper[4985]: E0125 00:06:30.211045 4985 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.196:6443: connect: connection refused" interval="200ms" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.211592 4985 factory.go:55] Registering systemd factory Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.211610 4985 factory.go:221] Registration of the systemd container factory successfully Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.212148 4985 factory.go:153] Registering CRI-O factory Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.212227 4985 factory.go:221] Registration of the crio container factory successfully Jan 25 00:06:30 crc 
kubenswrapper[4985]: I0125 00:06:30.213866 4985 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.213924 4985 factory.go:103] Registering Raw factory Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.213951 4985 manager.go:1196] Started watching for new ooms in manager Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.215972 4985 manager.go:319] Starting recovery of all containers Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.218618 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.218776 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.218853 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.218927 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.218997 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.219068 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.219167 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.219244 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.219316 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Jan 25 
00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.219387 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.219458 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.219532 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.219602 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.219681 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.219763 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.219837 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.219913 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.219997 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.220070 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.220162 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: 
I0125 00:06:30.220236 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.220306 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.220393 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221014 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221049 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221064 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221123 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221138 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221152 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221188 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221201 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221212 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221223 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221235 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221246 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221257 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221269 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221279 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221290 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221301 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221321 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221332 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221383 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221399 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221426 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221442 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221459 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221475 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221491 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221505 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221521 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221537 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221559 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221573 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" 
volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221584 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221596 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221607 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221620 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221633 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221646 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221657 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221670 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221686 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221700 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221715 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" 
volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221729 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221742 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221778 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221790 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221802 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221813 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221825 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221838 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221855 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221867 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221878 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" 
volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221889 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221900 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221912 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221925 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221935 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221945 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221955 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221965 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221976 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.221987 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222017 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222029 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222040 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222050 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222060 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222070 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222081 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222097 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222112 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222142 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222151 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222161 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222171 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222182 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222237 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222249 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222261 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222273 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222298 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222313 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222355 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222373 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222388 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222401 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222416 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222427 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222438 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222450 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222460 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222470 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222481 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222493 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222503 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222513 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" 
volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222557 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222568 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222578 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222589 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222599 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222610 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222622 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222632 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222642 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222653 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222664 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" 
volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222676 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222686 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222698 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222710 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222722 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222734 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222745 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222755 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222765 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222775 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222785 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222796 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222807 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222816 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222825 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222835 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222847 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222858 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222868 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222878 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222888 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222899 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" 
volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222908 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.222917 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.224950 4985 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225020 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225057 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225085 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225248 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225300 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225329 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225359 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225445 4985 reconstruct.go:130] "Volume is marked as uncertain and 
added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225469 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225491 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225511 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225530 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225548 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225568 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225587 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225606 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225625 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225649 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225671 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225691 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225711 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225730 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225749 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225769 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225788 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225807 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225827 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225847 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225866 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225885 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225908 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225926 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225955 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225976 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.225998 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.226017 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.226084 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.226204 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.226234 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.226258 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.226281 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" 
volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.226303 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.226326 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.226349 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.226372 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.226394 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.226420 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.226448 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.226477 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.226506 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.226537 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.226565 4985 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" 
volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.226591 4985 reconstruct.go:97] "Volume reconstruction finished" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.226610 4985 reconciler.go:26] "Reconciler: start to sync state" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.248345 4985 manager.go:324] Recovery completed Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.260081 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.262836 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.262890 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.262904 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.263947 4985 cpu_manager.go:225] "Starting CPU manager" policy="none" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.263965 4985 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.263985 4985 state_mem.go:36] "Initialized new in-memory state store" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.270415 4985 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.273312 4985 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.273370 4985 status_manager.go:217] "Starting to sync pod status with apiserver" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.273402 4985 kubelet.go:2335] "Starting kubelet main sync loop" Jan 25 00:06:30 crc kubenswrapper[4985]: E0125 00:06:30.273472 4985 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.302556 4985 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.196:6443: connect: connection refused Jan 25 00:06:30 crc kubenswrapper[4985]: E0125 00:06:30.302679 4985 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.196:6443: connect: connection refused" logger="UnhandledError" Jan 25 00:06:30 crc kubenswrapper[4985]: E0125 00:06:30.303258 4985 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.308934 4985 policy_none.go:49] "None policy: Start" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.310151 4985 memory_manager.go:170] "Starting memorymanager" policy="None" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.310273 4985 state_mem.go:35] "Initializing new in-memory state store" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.364986 4985 manager.go:334] "Starting Device Plugin manager" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.365220 4985 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.365256 4985 server.go:79] "Starting device plugin registration server" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.365845 4985 eviction_manager.go:189] "Eviction manager: starting control loop" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.365872 4985 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.366059 4985 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.366271 4985 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.366291 4985 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.373584 4985 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.373708 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.375319 4985 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.375397 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.375424 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.375734 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.375948 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.376000 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:30 crc kubenswrapper[4985]: E0125 00:06:30.376868 4985 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.377218 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.377245 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.377259 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.377459 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.377480 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.377491 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.377610 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.378039 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.378073 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.378844 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.378893 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.378912 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.379097 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.379200 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.379234 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.379647 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.379705 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.379726 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.380182 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.380225 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.380244 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.380315 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.380342 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.380357 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.380473 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.380603 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.380649 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.381204 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.381248 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.381267 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.381418 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.381462 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.381434 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.381517 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.381480 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.382371 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.382415 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.382430 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:30 crc kubenswrapper[4985]: E0125 00:06:30.411984 4985 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.196:6443: connect: connection refused" interval="400ms" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.428242 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.428283 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.428303 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.428324 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.428348 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.428374 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: 
\"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.428400 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.428467 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.428566 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.428636 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.428709 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.428767 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.428859 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.428948 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.429007 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.466024 
4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.467185 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.467217 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.467226 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.467248 4985 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 25 00:06:30 crc kubenswrapper[4985]: E0125 00:06:30.467981 4985 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.196:6443: connect: connection refused" node="crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.530980 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.531249 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.531422 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.531355 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.531492 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.531620 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.531644 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.531698 4985 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.531730 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.531779 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.531800 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.531842 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.531863 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.531883 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.531922 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.531944 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.531970 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: 
\"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.532009 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.532028 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.532312 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.532344 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.532364 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.532403 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.532405 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.532431 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.532450 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.532436 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.532467 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.532484 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.532502 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.668321 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.670306 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.670394 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.670415 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.670481 4985 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 25 00:06:30 crc kubenswrapper[4985]: E0125 00:06:30.671388 4985 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.196:6443: connect: connection refused" node="crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.722034 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.752005 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-55670619f0779944202a48c8994febd08d32b25209aff5c49b3f0b07330dff35 WatchSource:0}: Error finding container 55670619f0779944202a48c8994febd08d32b25209aff5c49b3f0b07330dff35: Status 404 returned error can't find the container with id 55670619f0779944202a48c8994febd08d32b25209aff5c49b3f0b07330dff35 Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.755398 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.770895 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-e170a47af26d68b47161a3cba3a6891f4bf4bf8794c17e107f02e0a3d468a1ed WatchSource:0}: Error finding container e170a47af26d68b47161a3cba3a6891f4bf4bf8794c17e107f02e0a3d468a1ed: Status 404 returned error can't find the container with id e170a47af26d68b47161a3cba3a6891f4bf4bf8794c17e107f02e0a3d468a1ed Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.777521 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.793159 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.799494 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-9154920a7d8b5ad30a50287e635c4c7dafd84e3b4852f2989481c099986dc6dd WatchSource:0}: Error finding container 9154920a7d8b5ad30a50287e635c4c7dafd84e3b4852f2989481c099986dc6dd: Status 404 returned error can't find the container with id 9154920a7d8b5ad30a50287e635c4c7dafd84e3b4852f2989481c099986dc6dd Jan 25 00:06:30 crc kubenswrapper[4985]: I0125 00:06:30.800857 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Jan 25 00:06:30 crc kubenswrapper[4985]: W0125 00:06:30.808874 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-56263fe2ca65bb1f2d9a951161cf782d8c2af34830be1e830de6dd5c1397993d WatchSource:0}: Error finding container 56263fe2ca65bb1f2d9a951161cf782d8c2af34830be1e830de6dd5c1397993d: Status 404 returned error can't find the container with id 56263fe2ca65bb1f2d9a951161cf782d8c2af34830be1e830de6dd5c1397993d Jan 25 00:06:30 crc kubenswrapper[4985]: E0125 00:06:30.813089 4985 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.196:6443: connect: connection refused" interval="800ms" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.072334 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.074494 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.074532 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.074541 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.074567 4985 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 25 00:06:31 crc kubenswrapper[4985]: E0125 00:06:31.075000 4985 kubelet_node_status.go:99] "Unable to register 
node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.196:6443: connect: connection refused" node="crc" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.199582 4985 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.196:6443: connect: connection refused Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.203721 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 00:46:04.549627226 +0000 UTC Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.278209 4985 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="ed4fa079c270fd176aaf6bb587eaa6e5c1cec4af40b215d621ebc50343f62008" exitCode=0 Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.278267 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"ed4fa079c270fd176aaf6bb587eaa6e5c1cec4af40b215d621ebc50343f62008"} Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.278337 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"958a58809b7ee25312689344723099a5ae7cfe3a69cd9566bf11933970b88b3b"} Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.278430 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.279337 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.279382 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.279394 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.280807 4985 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="ef1d5d8207817b57bc382cfb3b403f325cc1e158d53b4f0980130e3c04613f25" exitCode=0 Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.282301 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"ef1d5d8207817b57bc382cfb3b403f325cc1e158d53b4f0980130e3c04613f25"} Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.282374 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"56263fe2ca65bb1f2d9a951161cf782d8c2af34830be1e830de6dd5c1397993d"} Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.282537 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.283723 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:31 crc 
kubenswrapper[4985]: I0125 00:06:31.283775 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.283793 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.284289 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df"} Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.284338 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9154920a7d8b5ad30a50287e635c4c7dafd84e3b4852f2989481c099986dc6dd"} Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.287642 4985 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99" exitCode=0 Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.287691 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99"} Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.287749 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e170a47af26d68b47161a3cba3a6891f4bf4bf8794c17e107f02e0a3d468a1ed"} Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.287883 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.289250 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.289291 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.289310 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.290193 4985 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="10d1c5e6549442b3160096ae3ad896fa230f6681874846cac6cd4530aa391170" exitCode=0 Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.290238 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"10d1c5e6549442b3160096ae3ad896fa230f6681874846cac6cd4530aa391170"} Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.290283 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"55670619f0779944202a48c8994febd08d32b25209aff5c49b3f0b07330dff35"} Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.290454 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller 
attach/detach" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.291640 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.291676 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.291688 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.293851 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.295667 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.295710 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.295727 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:31 crc kubenswrapper[4985]: W0125 00:06:31.454793 4985 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.196:6443: connect: connection refused Jan 25 00:06:31 crc kubenswrapper[4985]: E0125 00:06:31.454890 4985 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.196:6443: connect: connection refused" logger="UnhandledError" Jan 25 00:06:31 crc kubenswrapper[4985]: W0125 00:06:31.592652 4985 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.196:6443: connect: connection refused Jan 25 00:06:31 crc kubenswrapper[4985]: E0125 00:06:31.592798 4985 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.196:6443: connect: connection refused" logger="UnhandledError" Jan 25 00:06:31 crc kubenswrapper[4985]: E0125 00:06:31.614609 4985 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.196:6443: connect: connection refused" interval="1.6s" Jan 25 00:06:31 crc kubenswrapper[4985]: W0125 00:06:31.703782 4985 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.196:6443: connect: connection refused Jan 25 00:06:31 crc kubenswrapper[4985]: E0125 00:06:31.703859 4985 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get 
\"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.196:6443: connect: connection refused" logger="UnhandledError" Jan 25 00:06:31 crc kubenswrapper[4985]: E0125 00:06:31.758884 4985 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.196:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.188dd09a6701c11b default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-25 00:06:30.198182171 +0000 UTC m=+0.230118484,LastTimestamp:2026-01-25 00:06:30.198182171 +0000 UTC m=+0.230118484,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 25 00:06:31 crc kubenswrapper[4985]: W0125 00:06:31.774411 4985 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.196:6443: connect: connection refused Jan 25 00:06:31 crc kubenswrapper[4985]: E0125 00:06:31.774486 4985 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.196:6443: connect: connection refused" logger="UnhandledError" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.877310 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.885455 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.885513 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.885531 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:31 crc kubenswrapper[4985]: I0125 00:06:31.885576 4985 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 25 00:06:31 crc kubenswrapper[4985]: E0125 00:06:31.886366 4985 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.196:6443: connect: connection refused" node="crc" Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.203881 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 10:03:52.836318149 +0000 UTC Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.220090 4985 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.293216 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" 
event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"576bfc5316d868ae8a28a9a77121be53f290b6d04dbf72bc22435f12ecd99ae5"} Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.293251 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"66146e1867d01422601c070ea10822d47529df9fba22de06fec14fc0a8124455"} Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.293260 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"ebc8ef5ed458ecf36d2dbca7f31efb3cdd30a8c6f652f2bc4adfe01908061cf9"} Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.293333 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.294010 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.294082 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.294180 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.295813 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294"} Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.295890 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5"} Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.295947 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7"} Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.296050 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.296722 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.296802 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.296859 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.298729 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f"} Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.298803 4985 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651"} Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.298867 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58"} Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.298924 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b"} Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.300258 4985 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="f247468f96053efabec80a3943301b824371b4db529e638153ea78de3a55c9b6" exitCode=0 Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.300361 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"f247468f96053efabec80a3943301b824371b4db529e638153ea78de3a55c9b6"} Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.300487 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.301231 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.301317 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.301373 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.303618 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"7f5f3afd65d384d3a15aa4feac4649bea5fbf3b5aa3f314eb19a62a483119bbc"} Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.303755 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.305210 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.305302 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:32 crc kubenswrapper[4985]: I0125 00:06:32.305364 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.185857 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.204050 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 00:23:14.3582046 +0000 UTC 
Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.313429 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef"} Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.313587 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.314972 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.315021 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.315039 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.317916 4985 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="1b67e2fa943c3db959df46bfa879d91fc3fdcb46d14dc3f8ddebdda5d5f3b0ff" exitCode=0 Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.318016 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"1b67e2fa943c3db959df46bfa879d91fc3fdcb46d14dc3f8ddebdda5d5f3b0ff"} Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.318084 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.318197 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.318254 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.320269 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.320304 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.320325 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.320345 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.320347 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.320381 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.320398 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.320345 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.320855 4985 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.487497 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.488666 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.488699 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.488717 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:33 crc kubenswrapper[4985]: I0125 00:06:33.488744 4985 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 25 00:06:34 crc kubenswrapper[4985]: I0125 00:06:34.204220 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 11:34:10.937498332 +0000 UTC Jan 25 00:06:34 crc kubenswrapper[4985]: I0125 00:06:34.332668 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:34 crc kubenswrapper[4985]: I0125 00:06:34.333132 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"8d0f5e5dd040b55d2048fcf8872f95b7f59b9d98b1f57b8db9279566a0fd7c82"} Jan 25 00:06:34 crc kubenswrapper[4985]: I0125 00:06:34.333182 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"cd19b3d06299db37381da7ccb0a0c18b7218ac4b11086a71ff99f0b66fe8bd32"} Jan 25 00:06:34 crc kubenswrapper[4985]: I0125 00:06:34.333199 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"3d16406949c0e3b968f27eb5903d09f4f6f00a048431e549dad52ed6ca874db3"} Jan 25 00:06:34 crc kubenswrapper[4985]: I0125 00:06:34.333212 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b575ed7dde44ce49b3579b9d416b60d59d03bc782f4b098759ee2ca4ad97a347"} Jan 25 00:06:34 crc kubenswrapper[4985]: I0125 00:06:34.333235 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:06:34 crc kubenswrapper[4985]: I0125 00:06:34.333490 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:34 crc kubenswrapper[4985]: I0125 00:06:34.333513 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:34 crc kubenswrapper[4985]: I0125 00:06:34.333523 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:35 crc kubenswrapper[4985]: I0125 00:06:35.204774 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 19:18:37.710152323 +0000 UTC Jan 25 00:06:35 crc kubenswrapper[4985]: I0125 00:06:35.339457 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"52c0bd735ebf4924b28a8f190b638051fbfa6d711826cedaf478a0bb0350daed"} Jan 25 00:06:35 crc kubenswrapper[4985]: I0125 00:06:35.339547 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:35 crc kubenswrapper[4985]: I0125 00:06:35.339576 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:35 crc kubenswrapper[4985]: I0125 00:06:35.340486 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:35 crc kubenswrapper[4985]: I0125 00:06:35.340518 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:35 crc kubenswrapper[4985]: I0125 00:06:35.340530 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:35 crc kubenswrapper[4985]: I0125 00:06:35.340940 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:35 crc kubenswrapper[4985]: I0125 00:06:35.340990 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:35 crc kubenswrapper[4985]: I0125 00:06:35.341008 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:35 crc kubenswrapper[4985]: I0125 00:06:35.422528 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 25 00:06:35 crc kubenswrapper[4985]: I0125 00:06:35.422689 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:35 crc kubenswrapper[4985]: I0125 00:06:35.423998 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:35 crc kubenswrapper[4985]: I0125 00:06:35.424046 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:35 crc kubenswrapper[4985]: I0125 00:06:35.424063 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:35 crc kubenswrapper[4985]: I0125 00:06:35.435309 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 25 00:06:35 crc kubenswrapper[4985]: I0125 00:06:35.595232 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Jan 25 00:06:35 crc kubenswrapper[4985]: I0125 00:06:35.795529 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:06:36 crc kubenswrapper[4985]: I0125 00:06:36.205355 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 03:34:11.213578359 +0000 UTC Jan 25 00:06:36 crc kubenswrapper[4985]: I0125 00:06:36.342702 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:36 crc kubenswrapper[4985]: I0125 00:06:36.342875 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:36 crc 
kubenswrapper[4985]: I0125 00:06:36.343276 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:36 crc kubenswrapper[4985]: I0125 00:06:36.344057 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:36 crc kubenswrapper[4985]: I0125 00:06:36.344175 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:36 crc kubenswrapper[4985]: I0125 00:06:36.344203 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:36 crc kubenswrapper[4985]: I0125 00:06:36.344411 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:36 crc kubenswrapper[4985]: I0125 00:06:36.344465 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:36 crc kubenswrapper[4985]: I0125 00:06:36.344486 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:36 crc kubenswrapper[4985]: I0125 00:06:36.345900 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:36 crc kubenswrapper[4985]: I0125 00:06:36.345968 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:36 crc kubenswrapper[4985]: I0125 00:06:36.345994 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:37 crc kubenswrapper[4985]: I0125 00:06:37.207072 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 23:23:53.20842173 +0000 UTC Jan 25 00:06:37 crc kubenswrapper[4985]: I0125 00:06:37.345598 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:37 crc kubenswrapper[4985]: I0125 00:06:37.347100 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:37 crc kubenswrapper[4985]: I0125 00:06:37.347298 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:37 crc kubenswrapper[4985]: I0125 00:06:37.347318 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:37 crc kubenswrapper[4985]: I0125 00:06:37.422743 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 25 00:06:37 crc kubenswrapper[4985]: I0125 00:06:37.422961 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:37 crc kubenswrapper[4985]: I0125 00:06:37.424937 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:37 crc kubenswrapper[4985]: I0125 00:06:37.425164 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:37 crc kubenswrapper[4985]: I0125 00:06:37.425310 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:37 crc kubenswrapper[4985]: I0125 
00:06:37.430006 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 25 00:06:38 crc kubenswrapper[4985]: I0125 00:06:38.128950 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:06:38 crc kubenswrapper[4985]: I0125 00:06:38.129291 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:38 crc kubenswrapper[4985]: I0125 00:06:38.130765 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:38 crc kubenswrapper[4985]: I0125 00:06:38.130817 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:38 crc kubenswrapper[4985]: I0125 00:06:38.130837 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:38 crc kubenswrapper[4985]: I0125 00:06:38.208147 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 15:18:04.097419305 +0000 UTC Jan 25 00:06:38 crc kubenswrapper[4985]: I0125 00:06:38.348510 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:38 crc kubenswrapper[4985]: I0125 00:06:38.349737 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:38 crc kubenswrapper[4985]: I0125 00:06:38.349791 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:38 crc kubenswrapper[4985]: I0125 00:06:38.349810 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:38 crc kubenswrapper[4985]: I0125 00:06:38.423156 4985 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 25 00:06:38 crc kubenswrapper[4985]: I0125 00:06:38.423225 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 25 00:06:38 crc kubenswrapper[4985]: I0125 00:06:38.813390 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Jan 25 00:06:38 crc kubenswrapper[4985]: I0125 00:06:38.813597 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:38 crc kubenswrapper[4985]: I0125 00:06:38.815291 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:38 crc kubenswrapper[4985]: I0125 00:06:38.815366 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:38 crc kubenswrapper[4985]: I0125 
00:06:38.815408 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:39 crc kubenswrapper[4985]: I0125 00:06:39.208851 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 10:12:55.150228289 +0000 UTC Jan 25 00:06:40 crc kubenswrapper[4985]: I0125 00:06:40.128739 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 25 00:06:40 crc kubenswrapper[4985]: I0125 00:06:40.128961 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:40 crc kubenswrapper[4985]: I0125 00:06:40.130514 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:40 crc kubenswrapper[4985]: I0125 00:06:40.130567 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:40 crc kubenswrapper[4985]: I0125 00:06:40.130582 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:40 crc kubenswrapper[4985]: I0125 00:06:40.209471 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 09:20:49.623362502 +0000 UTC Jan 25 00:06:40 crc kubenswrapper[4985]: E0125 00:06:40.376996 4985 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Jan 25 00:06:41 crc kubenswrapper[4985]: I0125 00:06:41.210507 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 20:31:26.007563322 +0000 UTC Jan 25 00:06:42 crc kubenswrapper[4985]: I0125 00:06:42.200792 4985 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Jan 25 00:06:42 crc kubenswrapper[4985]: I0125 00:06:42.211411 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 01:55:13.938808317 +0000 UTC Jan 25 00:06:42 crc kubenswrapper[4985]: E0125 00:06:42.222022 4985 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": net/http: TLS handshake timeout" logger="UnhandledError" Jan 25 00:06:42 crc kubenswrapper[4985]: I0125 00:06:42.620744 4985 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Jan 25 00:06:42 crc kubenswrapper[4985]: I0125 00:06:42.620849 4985 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get 
\"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Jan 25 00:06:43 crc kubenswrapper[4985]: I0125 00:06:43.212366 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 15:38:05.447594108 +0000 UTC Jan 25 00:06:43 crc kubenswrapper[4985]: E0125 00:06:43.215680 4985 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" interval="3.2s" Jan 25 00:06:43 crc kubenswrapper[4985]: E0125 00:06:43.490087 4985 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": net/http: TLS handshake timeout" node="crc" Jan 25 00:06:43 crc kubenswrapper[4985]: W0125 00:06:43.678865 4985 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout Jan 25 00:06:43 crc kubenswrapper[4985]: I0125 00:06:43.678982 4985 trace.go:236] Trace[934262956]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (25-Jan-2026 00:06:33.677) (total time: 10001ms): Jan 25 00:06:43 crc kubenswrapper[4985]: Trace[934262956]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (00:06:43.678) Jan 25 00:06:43 crc kubenswrapper[4985]: Trace[934262956]: [10.001485344s] [10.001485344s] END Jan 25 00:06:43 crc kubenswrapper[4985]: E0125 00:06:43.679008 4985 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Jan 25 00:06:43 crc kubenswrapper[4985]: W0125 00:06:43.699163 4985 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Jan 25 00:06:43 crc kubenswrapper[4985]: I0125 00:06:43.699322 4985 trace.go:236] Trace[1076869876]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (25-Jan-2026 00:06:33.697) (total time: 10001ms): Jan 25 00:06:43 crc kubenswrapper[4985]: Trace[1076869876]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (00:06:43.699) Jan 25 00:06:43 crc kubenswrapper[4985]: Trace[1076869876]: [10.001726736s] [10.001726736s] END Jan 25 00:06:43 crc kubenswrapper[4985]: E0125 00:06:43.699362 4985 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Jan 25 00:06:43 crc kubenswrapper[4985]: I0125 00:06:43.842372 4985 patch_prober.go:28] interesting 
pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Jan 25 00:06:43 crc kubenswrapper[4985]: I0125 00:06:43.842453 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Jan 25 00:06:43 crc kubenswrapper[4985]: I0125 00:06:43.912504 4985 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Jan 25 00:06:43 crc kubenswrapper[4985]: I0125 00:06:43.912585 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Jan 25 00:06:44 crc kubenswrapper[4985]: I0125 00:06:44.297173 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 01:12:50.180879939 +0000 UTC Jan 25 00:06:45 crc kubenswrapper[4985]: I0125 00:06:45.297652 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 06:37:52.79113323 +0000 UTC Jan 25 00:06:45 crc kubenswrapper[4985]: I0125 00:06:45.642853 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Jan 25 00:06:45 crc kubenswrapper[4985]: I0125 00:06:45.643036 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:45 crc kubenswrapper[4985]: I0125 00:06:45.644043 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:45 crc kubenswrapper[4985]: I0125 00:06:45.644095 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:45 crc kubenswrapper[4985]: I0125 00:06:45.644143 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:45 crc kubenswrapper[4985]: I0125 00:06:45.658909 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Jan 25 00:06:46 crc kubenswrapper[4985]: I0125 00:06:46.298518 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 14:20:19.87665364 +0000 UTC Jan 25 00:06:46 crc kubenswrapper[4985]: I0125 00:06:46.321185 4985 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Jan 25 00:06:46 crc kubenswrapper[4985]: I0125 00:06:46.340356 4985 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from 
k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 25 00:06:46 crc kubenswrapper[4985]: I0125 00:06:46.377148 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:46 crc kubenswrapper[4985]: I0125 00:06:46.378547 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:46 crc kubenswrapper[4985]: I0125 00:06:46.378606 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:46 crc kubenswrapper[4985]: I0125 00:06:46.378618 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:46 crc kubenswrapper[4985]: I0125 00:06:46.690559 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:46 crc kubenswrapper[4985]: I0125 00:06:46.692348 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:46 crc kubenswrapper[4985]: I0125 00:06:46.692414 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:46 crc kubenswrapper[4985]: I0125 00:06:46.692440 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:46 crc kubenswrapper[4985]: I0125 00:06:46.692488 4985 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 25 00:06:46 crc kubenswrapper[4985]: E0125 00:06:46.697779 4985 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Jan 25 00:06:47 crc kubenswrapper[4985]: I0125 00:06:47.028944 4985 csr.go:261] certificate signing request csr-wdxt8 is approved, waiting to be issued Jan 25 00:06:47 crc kubenswrapper[4985]: I0125 00:06:47.082254 4985 csr.go:257] certificate signing request csr-wdxt8 is issued Jan 25 00:06:47 crc kubenswrapper[4985]: I0125 00:06:47.299592 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 13:56:27.471140233 +0000 UTC Jan 25 00:06:48 crc kubenswrapper[4985]: I0125 00:06:48.084318 4985 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-01-25 00:01:47 +0000 UTC, rotation deadline is 2026-11-30 13:55:05.641811649 +0000 UTC Jan 25 00:06:48 crc kubenswrapper[4985]: I0125 00:06:48.084684 4985 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7429h48m17.557138847s for next certificate rotation Jan 25 00:06:48 crc kubenswrapper[4985]: I0125 00:06:48.135275 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:06:48 crc kubenswrapper[4985]: I0125 00:06:48.135830 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:48 crc kubenswrapper[4985]: I0125 00:06:48.137301 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:48 crc kubenswrapper[4985]: I0125 00:06:48.137470 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:48 crc kubenswrapper[4985]: I0125 00:06:48.137605 4985 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:48 crc kubenswrapper[4985]: I0125 00:06:48.142850 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:06:48 crc kubenswrapper[4985]: I0125 00:06:48.299876 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 18:34:28.527925955 +0000 UTC Jan 25 00:06:48 crc kubenswrapper[4985]: I0125 00:06:48.382060 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:48 crc kubenswrapper[4985]: I0125 00:06:48.382820 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:48 crc kubenswrapper[4985]: I0125 00:06:48.383010 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:48 crc kubenswrapper[4985]: I0125 00:06:48.383172 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:48 crc kubenswrapper[4985]: I0125 00:06:48.424208 4985 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 25 00:06:48 crc kubenswrapper[4985]: I0125 00:06:48.424491 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 25 00:06:48 crc kubenswrapper[4985]: I0125 00:06:48.581415 4985 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 25 00:06:48 crc kubenswrapper[4985]: I0125 00:06:48.834294 4985 trace.go:236] Trace[1131429781]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (25-Jan-2026 00:06:34.034) (total time: 14800ms): Jan 25 00:06:48 crc kubenswrapper[4985]: Trace[1131429781]: ---"Objects listed" error: 14800ms (00:06:48.834) Jan 25 00:06:48 crc kubenswrapper[4985]: Trace[1131429781]: [14.800212273s] [14.800212273s] END Jan 25 00:06:48 crc kubenswrapper[4985]: I0125 00:06:48.834319 4985 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 25 00:06:48 crc kubenswrapper[4985]: I0125 00:06:48.835249 4985 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Jan 25 00:06:48 crc kubenswrapper[4985]: I0125 00:06:48.836587 4985 trace.go:236] Trace[58333839]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (25-Jan-2026 00:06:34.559) (total time: 14276ms): Jan 25 00:06:48 crc kubenswrapper[4985]: Trace[58333839]: ---"Objects listed" error: 14276ms (00:06:48.836) Jan 25 00:06:48 crc kubenswrapper[4985]: Trace[58333839]: [14.276913345s] [14.276913345s] END Jan 25 00:06:48 crc kubenswrapper[4985]: I0125 00:06:48.836963 4985 reflector.go:368] Caches populated for *v1.Service from 
k8s.io/client-go/informers/factory.go:160 Jan 25 00:06:48 crc kubenswrapper[4985]: I0125 00:06:48.985621 4985 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 25 00:06:48 crc kubenswrapper[4985]: I0125 00:06:48.995857 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.024423 4985 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:32844->192.168.126.11:17697: read: connection reset by peer" start-of-body= Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.024471 4985 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:32844->192.168.126.11:17697: read: connection reset by peer" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.299909 4985 apiserver.go:52] "Watching apiserver" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.301009 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 05:22:38.66413958 +0000 UTC Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.305938 4985 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.306321 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-dns/node-resolver-xzbbh","openshift-machine-config-operator/machine-config-daemon-dddxc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c"] Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.306697 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:06:49 crc kubenswrapper[4985]: E0125 00:06:49.306757 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.306697 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.306872 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.306938 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 25 00:06:49 crc kubenswrapper[4985]: E0125 00:06:49.306963 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.307070 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.307166 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-xzbbh" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.307252 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.307371 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:06:49 crc kubenswrapper[4985]: E0125 00:06:49.307533 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.317993 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.328391 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.328777 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.337670 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.338000 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.338069 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.338203 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.338077 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.338370 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 25 00:06:49 crc 
kubenswrapper[4985]: I0125 00:06:49.338642 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.338728 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.338757 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.338832 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.338893 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.339033 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.340980 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.343062 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.384876 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.386387 4985 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef" exitCode=255 Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.386432 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef"} Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.399651 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.405823 4985 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438325 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438382 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438404 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438426 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438449 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438472 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: 
\"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438490 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438509 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438529 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438547 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438570 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438589 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438610 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438630 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438682 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438702 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod 
\"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438722 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438745 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438767 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438787 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438807 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438828 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438849 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438870 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438890 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438910 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438935 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438955 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438975 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.438998 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439024 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439047 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439070 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439091 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439131 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439151 4985 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439171 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439192 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439210 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439230 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439251 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439273 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439292 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439325 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439344 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 25 00:06:49 crc 
kubenswrapper[4985]: I0125 00:06:49.439365 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439384 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439403 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439428 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439448 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439469 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439489 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439510 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439533 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439555 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439575 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439597 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439616 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439638 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439659 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439680 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439700 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439721 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439742 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439764 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" 
(UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439784 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439806 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439826 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439849 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439870 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439890 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439910 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439951 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439973 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.439992 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440012 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440033 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440054 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440075 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440096 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440137 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440159 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440182 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440203 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440223 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" 
(UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440244 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440266 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440295 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440316 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440337 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440359 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440379 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440401 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440422 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440443 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440466 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440490 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440510 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440531 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440552 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440575 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440596 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440617 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440640 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440662 4985 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440684 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440707 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440728 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440749 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440771 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440792 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440816 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440839 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440859 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 
00:06:49.440880 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440902 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440924 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440947 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440968 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.440989 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441012 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441035 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441056 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441080 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 
00:06:49.441118 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441141 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441164 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441187 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441211 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441232 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441254 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441277 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441300 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441321 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 25 
00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441344 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441365 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441393 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441415 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441436 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441458 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441480 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441501 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441525 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441547 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod 
\"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441571 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441592 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441614 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441637 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441661 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441684 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441706 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441730 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441752 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441773 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod 
\"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441794 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441817 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441839 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441860 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441882 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441905 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441929 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441951 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441975 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.441996 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442019 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442041 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442063 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442088 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442129 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442151 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442172 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442196 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442220 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442244 4985 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442268 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442293 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442318 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442342 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442367 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442397 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442420 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442443 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442468 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442493 4985 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442518 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442542 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442688 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442713 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442737 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442952 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442975 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.442998 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.443021 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 25 
00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.443045 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.443067 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.443137 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.443172 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvx5b\" (UniqueName: \"kubernetes.io/projected/5fa83abe-5c61-40a5-bf77-d8f929bdda78-kube-api-access-dvx5b\") pod \"machine-config-daemon-dddxc\" (UID: \"5fa83abe-5c61-40a5-bf77-d8f929bdda78\") " pod="openshift-machine-config-operator/machine-config-daemon-dddxc" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.443199 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.443226 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.443252 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.443274 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5fa83abe-5c61-40a5-bf77-d8f929bdda78-proxy-tls\") pod \"machine-config-daemon-dddxc\" (UID: \"5fa83abe-5c61-40a5-bf77-d8f929bdda78\") " pod="openshift-machine-config-operator/machine-config-daemon-dddxc" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.443299 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod 
\"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.443323 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.443347 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.443375 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/5fa83abe-5c61-40a5-bf77-d8f929bdda78-rootfs\") pod \"machine-config-daemon-dddxc\" (UID: \"5fa83abe-5c61-40a5-bf77-d8f929bdda78\") " pod="openshift-machine-config-operator/machine-config-daemon-dddxc" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.443426 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.443451 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.443475 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27ckk\" (UniqueName: \"kubernetes.io/projected/d1879c0f-3576-4f5a-9ac2-ada68270b8da-kube-api-access-27ckk\") pod \"node-resolver-xzbbh\" (UID: \"d1879c0f-3576-4f5a-9ac2-ada68270b8da\") " pod="openshift-dns/node-resolver-xzbbh" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.443502 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.443526 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.443549 4985 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/d1879c0f-3576-4f5a-9ac2-ada68270b8da-hosts-file\") pod \"node-resolver-xzbbh\" (UID: \"d1879c0f-3576-4f5a-9ac2-ada68270b8da\") " pod="openshift-dns/node-resolver-xzbbh" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.443570 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5fa83abe-5c61-40a5-bf77-d8f929bdda78-mcd-auth-proxy-config\") pod \"machine-config-daemon-dddxc\" (UID: \"5fa83abe-5c61-40a5-bf77-d8f929bdda78\") " pod="openshift-machine-config-operator/machine-config-daemon-dddxc" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.443593 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.443620 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.443646 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.444706 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.444944 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.445169 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.449576 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.449873 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.450086 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.450312 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.450804 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.451070 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.451520 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.452019 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). 
InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.452464 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.452693 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.453126 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.453513 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.453702 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.453858 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.454009 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.454178 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.454329 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.454482 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.454641 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.454793 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.454953 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.455124 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.455274 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.455428 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.455603 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.455974 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.456194 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.456350 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.456502 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.456661 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.456814 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.456978 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.457143 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.457302 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.457471 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.457614 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.457776 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). 
InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.457923 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.458086 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.458178 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.458290 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.458407 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.458588 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.458662 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.458814 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.458954 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.459021 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.459522 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.459685 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.459753 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.459893 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.460032 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). 
InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.460175 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.460272 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.460320 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.460445 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.460594 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.460658 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.460789 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.460827 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.460929 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.461054 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.461195 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.461340 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.461365 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.461552 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.461727 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.461760 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.461892 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.461899 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.462075 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.462119 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.462265 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.462296 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.462392 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.462474 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.462520 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.462729 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.462780 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.463057 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.463278 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.463491 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.463696 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.463988 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.464452 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.464689 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.464750 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.464873 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.464959 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.465055 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.465202 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.465340 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.465579 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.465621 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.465749 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.465909 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.466198 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.466525 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.466756 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.467169 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.467404 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.467786 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.468131 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.468478 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.468755 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.469011 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.469201 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.469367 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.469755 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.470187 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.470437 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.470576 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.471017 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). 
InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.471363 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.471905 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.472545 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.472888 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.473129 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.473391 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.473635 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.473832 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). 
InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.473881 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.474063 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: E0125 00:06:49.474132 4985 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 25 00:06:49 crc kubenswrapper[4985]: E0125 00:06:49.474232 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-25 00:06:49.974165341 +0000 UTC m=+20.006101614 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.474434 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.474486 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.474786 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.474898 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.475257 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.475262 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: E0125 00:06:49.475317 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:06:49.975308302 +0000 UTC m=+20.007244575 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.475615 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.475664 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.475788 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.476065 4985 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.476825 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: E0125 00:06:49.476896 4985 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 25 00:06:49 crc kubenswrapper[4985]: E0125 00:06:49.476950 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-25 00:06:49.976934934 +0000 UTC m=+20.008871207 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.477720 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.478114 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.478655 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.479189 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.479433 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.479679 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.479923 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.480231 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.480250 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.480573 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.480592 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.480863 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.480874 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.481097 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.481130 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.481630 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.481890 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.482023 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.482084 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.482322 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.482450 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.482504 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.482602 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.483915 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.484308 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.484576 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.484807 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.485062 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.485378 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.485602 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.486045 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.486193 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.486265 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.486626 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.486952 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.486977 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.487175 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.487439 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.487686 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.487979 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.488200 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.488392 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.488483 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.488767 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.489157 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.491010 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.491170 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.491296 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.491631 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.491765 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.491899 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.491905 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.492043 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.492934 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.495271 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.495410 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). 
InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.502932 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.520124 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.522190 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.522243 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 25 00:06:49 crc kubenswrapper[4985]: E0125 00:06:49.531376 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 25 00:06:49 crc kubenswrapper[4985]: E0125 00:06:49.531411 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 25 00:06:49 crc kubenswrapper[4985]: E0125 00:06:49.531425 4985 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:06:49 crc kubenswrapper[4985]: E0125 00:06:49.531480 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-25 00:06:50.031462071 +0000 UTC m=+20.063398344 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.534936 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.535386 4985 scope.go:117] "RemoveContainer" containerID="fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.535666 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.536630 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.537395 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.539192 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: E0125 00:06:49.541511 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 25 00:06:49 crc kubenswrapper[4985]: E0125 00:06:49.541540 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 25 00:06:49 crc kubenswrapper[4985]: E0125 00:06:49.541554 4985 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:06:49 crc kubenswrapper[4985]: E0125 00:06:49.541599 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-25 00:06:50.041583887 +0000 UTC m=+20.073520160 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.544381 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.544881 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27ckk\" (UniqueName: \"kubernetes.io/projected/d1879c0f-3576-4f5a-9ac2-ada68270b8da-kube-api-access-27ckk\") pod \"node-resolver-xzbbh\" (UID: \"d1879c0f-3576-4f5a-9ac2-ada68270b8da\") " pod="openshift-dns/node-resolver-xzbbh" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.544907 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/d1879c0f-3576-4f5a-9ac2-ada68270b8da-hosts-file\") pod \"node-resolver-xzbbh\" (UID: \"d1879c0f-3576-4f5a-9ac2-ada68270b8da\") " pod="openshift-dns/node-resolver-xzbbh" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.544927 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5fa83abe-5c61-40a5-bf77-d8f929bdda78-mcd-auth-proxy-config\") pod \"machine-config-daemon-dddxc\" (UID: \"5fa83abe-5c61-40a5-bf77-d8f929bdda78\") " pod="openshift-machine-config-operator/machine-config-daemon-dddxc" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.544941 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: 
\"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.544963 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvx5b\" (UniqueName: \"kubernetes.io/projected/5fa83abe-5c61-40a5-bf77-d8f929bdda78-kube-api-access-dvx5b\") pod \"machine-config-daemon-dddxc\" (UID: \"5fa83abe-5c61-40a5-bf77-d8f929bdda78\") " pod="openshift-machine-config-operator/machine-config-daemon-dddxc" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.544985 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5fa83abe-5c61-40a5-bf77-d8f929bdda78-proxy-tls\") pod \"machine-config-daemon-dddxc\" (UID: \"5fa83abe-5c61-40a5-bf77-d8f929bdda78\") " pod="openshift-machine-config-operator/machine-config-daemon-dddxc" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545009 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545027 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/5fa83abe-5c61-40a5-bf77-d8f929bdda78-rootfs\") pod \"machine-config-daemon-dddxc\" (UID: \"5fa83abe-5c61-40a5-bf77-d8f929bdda78\") " pod="openshift-machine-config-operator/machine-config-daemon-dddxc" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545078 4985 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545091 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545117 4985 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545127 4985 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545135 4985 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545144 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 
00:06:49.545153 4985 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545162 4985 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545173 4985 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545183 4985 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545192 4985 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545202 4985 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545212 4985 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545221 4985 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545229 4985 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545248 4985 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545257 4985 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545265 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545274 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545282 4985 reconciler_common.go:293] "Volume detached for volume \"service-ca\" 
(UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545290 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545297 4985 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545307 4985 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545315 4985 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545322 4985 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545330 4985 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545339 4985 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545347 4985 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545356 4985 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545364 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545373 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545381 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545389 4985 reconciler_common.go:293] "Volume detached for volume 
\"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545397 4985 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545405 4985 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545413 4985 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545421 4985 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545429 4985 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545438 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545446 4985 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545455 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545464 4985 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545472 4985 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545481 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545489 4985 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 
00:06:49.545497 4985 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545505 4985 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545513 4985 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545522 4985 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545530 4985 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545539 4985 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545548 4985 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545558 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545567 4985 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545575 4985 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545583 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545590 4985 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545599 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545608 
4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545616 4985 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545624 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545632 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545639 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545647 4985 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545655 4985 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545664 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545673 4985 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545681 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545691 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545698 4985 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545706 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" 
DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545715 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545722 4985 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545730 4985 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545737 4985 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545745 4985 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545753 4985 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545761 4985 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545768 4985 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545775 4985 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545783 4985 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545791 4985 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545799 4985 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545807 4985 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545815 
4985 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545823 4985 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545830 4985 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545848 4985 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545857 4985 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545866 4985 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545875 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545883 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545892 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545900 4985 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545908 4985 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545917 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545925 4985 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545932 4985 reconciler_common.go:293] "Volume detached 
for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545940 4985 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545948 4985 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545956 4985 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545965 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545972 4985 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545980 4985 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545987 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.545995 4985 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546003 4985 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546011 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546019 4985 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546026 4985 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546034 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: 
\"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546042 4985 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546049 4985 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546058 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546065 4985 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546073 4985 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546081 4985 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546089 4985 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546096 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546118 4985 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546127 4985 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546134 4985 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546142 4985 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546150 4985 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546159 4985 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546167 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546174 4985 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546183 4985 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546190 4985 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546198 4985 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546206 4985 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546213 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546221 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546230 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546237 4985 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546245 4985 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546252 4985 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: 
\"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546260 4985 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546267 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546276 4985 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546284 4985 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546291 4985 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546298 4985 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546306 4985 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546314 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546322 4985 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546330 4985 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546338 4985 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546345 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546353 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" 
(UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546362 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546370 4985 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546380 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546388 4985 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546397 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546406 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546413 4985 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546421 4985 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546429 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546436 4985 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546444 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546451 4985 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546459 4985 reconciler_common.go:293] "Volume detached for volume 
\"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546467 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546475 4985 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546483 4985 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546490 4985 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546499 4985 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546507 4985 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546515 4985 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546522 4985 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546530 4985 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546538 4985 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546546 4985 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546554 4985 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546561 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: 
\"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546569 4985 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546578 4985 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546586 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546594 4985 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546602 4985 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546626 4985 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546636 4985 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546645 4985 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546653 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546662 4985 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546669 4985 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546677 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546686 4985 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546694 4985 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546702 4985 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546710 4985 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546718 4985 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546751 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/5fa83abe-5c61-40a5-bf77-d8f929bdda78-rootfs\") pod \"machine-config-daemon-dddxc\" (UID: \"5fa83abe-5c61-40a5-bf77-d8f929bdda78\") " pod="openshift-machine-config-operator/machine-config-daemon-dddxc" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.546965 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/d1879c0f-3576-4f5a-9ac2-ada68270b8da-hosts-file\") pod \"node-resolver-xzbbh\" (UID: \"d1879c0f-3576-4f5a-9ac2-ada68270b8da\") " pod="openshift-dns/node-resolver-xzbbh" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.548642 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5fa83abe-5c61-40a5-bf77-d8f929bdda78-mcd-auth-proxy-config\") pod \"machine-config-daemon-dddxc\" (UID: \"5fa83abe-5c61-40a5-bf77-d8f929bdda78\") " pod="openshift-machine-config-operator/machine-config-daemon-dddxc" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.548738 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.550986 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5fa83abe-5c61-40a5-bf77-d8f929bdda78-proxy-tls\") pod \"machine-config-daemon-dddxc\" (UID: \"5fa83abe-5c61-40a5-bf77-d8f929bdda78\") " pod="openshift-machine-config-operator/machine-config-daemon-dddxc" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.551030 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 25 00:06:49 crc 
kubenswrapper[4985]: I0125 00:06:49.552702 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.552788 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.566977 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27ckk\" (UniqueName: \"kubernetes.io/projected/d1879c0f-3576-4f5a-9ac2-ada68270b8da-kube-api-access-27ckk\") pod \"node-resolver-xzbbh\" (UID: \"d1879c0f-3576-4f5a-9ac2-ada68270b8da\") " pod="openshift-dns/node-resolver-xzbbh" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.568340 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvx5b\" (UniqueName: \"kubernetes.io/projected/5fa83abe-5c61-40a5-bf77-d8f929bdda78-kube-api-access-dvx5b\") pod \"machine-config-daemon-dddxc\" (UID: \"5fa83abe-5c61-40a5-bf77-d8f929bdda78\") " pod="openshift-machine-config-operator/machine-config-daemon-dddxc" Jan 25 00:06:49 crc kubenswrapper[4985]: 
I0125 00:06:49.576220 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.591442 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-dt2mv"] Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.591938 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-4w9l7"] Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.592119 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.592319 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.596940 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.598188 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.598452 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.598665 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.598808 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.598962 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.599118 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.599267 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.610738 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.624528 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.625680 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.641351 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.645518 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.647910 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c4a302c2-5f69-46d5-b4da-7e4306ea3a3d-system-cni-dir\") pod \"multus-additional-cni-plugins-dt2mv\" (UID: \"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\") " pod="openshift-multus/multus-additional-cni-plugins-dt2mv" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.647946 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/c4a302c2-5f69-46d5-b4da-7e4306ea3a3d-cni-binary-copy\") pod \"multus-additional-cni-plugins-dt2mv\" (UID: \"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\") " pod="openshift-multus/multus-additional-cni-plugins-dt2mv" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.647972 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0294dfed-64df-4d3c-92de-7a93787780a2-cni-binary-copy\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.647996 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5zfx\" (UniqueName: \"kubernetes.io/projected/0294dfed-64df-4d3c-92de-7a93787780a2-kube-api-access-t5zfx\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.648017 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-hostroot\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.648050 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-system-cni-dir\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.648075 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-host-var-lib-cni-bin\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.648094 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-etc-kubernetes\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.648130 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-cnibin\") pod \"multus-4w9l7\" (UID: 
\"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.648152 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-host-var-lib-cni-multus\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.648171 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-host-var-lib-kubelet\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.648199 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-multus-cni-dir\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.648259 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-os-release\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.648281 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/c4a302c2-5f69-46d5-b4da-7e4306ea3a3d-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-dt2mv\" (UID: \"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\") " pod="openshift-multus/multus-additional-cni-plugins-dt2mv" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.648305 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8zxg\" (UniqueName: \"kubernetes.io/projected/c4a302c2-5f69-46d5-b4da-7e4306ea3a3d-kube-api-access-n8zxg\") pod \"multus-additional-cni-plugins-dt2mv\" (UID: \"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\") " pod="openshift-multus/multus-additional-cni-plugins-dt2mv" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.648336 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-host-run-k8s-cni-cncf-io\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.648356 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/c4a302c2-5f69-46d5-b4da-7e4306ea3a3d-tuning-conf-dir\") pod \"multus-additional-cni-plugins-dt2mv\" (UID: \"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\") " pod="openshift-multus/multus-additional-cni-plugins-dt2mv" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.648379 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: 
\"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-host-run-multus-certs\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.648401 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/c4a302c2-5f69-46d5-b4da-7e4306ea3a3d-cnibin\") pod \"multus-additional-cni-plugins-dt2mv\" (UID: \"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\") " pod="openshift-multus/multus-additional-cni-plugins-dt2mv" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.648424 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-host-run-netns\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.648444 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-multus-conf-dir\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.648464 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/0294dfed-64df-4d3c-92de-7a93787780a2-multus-daemon-config\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.648486 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/c4a302c2-5f69-46d5-b4da-7e4306ea3a3d-os-release\") pod \"multus-additional-cni-plugins-dt2mv\" (UID: \"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\") " pod="openshift-multus/multus-additional-cni-plugins-dt2mv" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.648521 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-multus-socket-dir-parent\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.648549 4985 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.653786 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.656916 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.662512 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.666828 4985 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 25 00:06:49 crc kubenswrapper[4985]: W0125 00:06:49.670276 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-ab3105758e59c7203feb7eb14aafb6979c561e67ac7d4b682757f55cd5493de4 WatchSource:0}: Error finding container ab3105758e59c7203feb7eb14aafb6979c561e67ac7d4b682757f55cd5493de4: Status 404 returned error can't find the container with id ab3105758e59c7203feb7eb14aafb6979c561e67ac7d4b682757f55cd5493de4 Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.674543 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.675082 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-xzbbh" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.685193 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:49 crc kubenswrapper[4985]: W0125 00:06:49.689409 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fa83abe_5c61_40a5_bf77_d8f929bdda78.slice/crio-46c3fcea96b23f4f0e76bc3705f30af93194f129f983bcef8a40b50c1f8d611c WatchSource:0}: Error finding container 46c3fcea96b23f4f0e76bc3705f30af93194f129f983bcef8a40b50c1f8d611c: Status 404 returned error can't find the container with id 46c3fcea96b23f4f0e76bc3705f30af93194f129f983bcef8a40b50c1f8d611c Jan 25 00:06:49 crc kubenswrapper[4985]: W0125 00:06:49.692232 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-3b2722e6cd3fbb65185f13d2953ebd6953bd4c45dfc0fe9647c160f4a75aa973 WatchSource:0}: Error finding container 
3b2722e6cd3fbb65185f13d2953ebd6953bd4c45dfc0fe9647c160f4a75aa973: Status 404 returned error can't find the container with id 3b2722e6cd3fbb65185f13d2953ebd6953bd4c45dfc0fe9647c160f4a75aa973 Jan 25 00:06:49 crc kubenswrapper[4985]: W0125 00:06:49.692396 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-e7de8f9511bc05bf6caaa8b59c95c1f38be68f972cfedc689389a206326efd12 WatchSource:0}: Error finding container e7de8f9511bc05bf6caaa8b59c95c1f38be68f972cfedc689389a206326efd12: Status 404 returned error can't find the container with id e7de8f9511bc05bf6caaa8b59c95c1f38be68f972cfedc689389a206326efd12 Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.697238 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.708714 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin 
routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"last
State\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 
127.0.0.1:9743: connect: connection refused" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.718352 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"
restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.729804 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.755565 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.755870 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-host-var-lib-cni-bin\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.755905 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-etc-kubernetes\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.755922 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-cnibin\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.755947 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-host-var-lib-cni-multus\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.755970 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-host-var-lib-kubelet\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.755998 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-multus-cni-dir\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.756012 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-os-release\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 
crc kubenswrapper[4985]: I0125 00:06:49.756049 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/c4a302c2-5f69-46d5-b4da-7e4306ea3a3d-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-dt2mv\" (UID: \"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\") " pod="openshift-multus/multus-additional-cni-plugins-dt2mv" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.756069 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8zxg\" (UniqueName: \"kubernetes.io/projected/c4a302c2-5f69-46d5-b4da-7e4306ea3a3d-kube-api-access-n8zxg\") pod \"multus-additional-cni-plugins-dt2mv\" (UID: \"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\") " pod="openshift-multus/multus-additional-cni-plugins-dt2mv" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.756097 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-host-run-k8s-cni-cncf-io\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.756136 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/c4a302c2-5f69-46d5-b4da-7e4306ea3a3d-tuning-conf-dir\") pod \"multus-additional-cni-plugins-dt2mv\" (UID: \"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\") " pod="openshift-multus/multus-additional-cni-plugins-dt2mv" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.756162 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-host-run-multus-certs\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.756186 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/c4a302c2-5f69-46d5-b4da-7e4306ea3a3d-cnibin\") pod \"multus-additional-cni-plugins-dt2mv\" (UID: \"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\") " pod="openshift-multus/multus-additional-cni-plugins-dt2mv" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.756384 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-multus-cni-dir\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.756481 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-host-var-lib-cni-bin\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.756514 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-etc-kubernetes\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.756555 4985 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-cnibin\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.756586 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-host-var-lib-cni-multus\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.756617 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-host-var-lib-kubelet\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.756651 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-host-run-multus-certs\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.756947 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/c4a302c2-5f69-46d5-b4da-7e4306ea3a3d-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-dt2mv\" (UID: \"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\") " pod="openshift-multus/multus-additional-cni-plugins-dt2mv" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.757011 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/c4a302c2-5f69-46d5-b4da-7e4306ea3a3d-tuning-conf-dir\") pod \"multus-additional-cni-plugins-dt2mv\" (UID: \"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\") " pod="openshift-multus/multus-additional-cni-plugins-dt2mv" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.757040 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-os-release\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.757055 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/c4a302c2-5f69-46d5-b4da-7e4306ea3a3d-cnibin\") pod \"multus-additional-cni-plugins-dt2mv\" (UID: \"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\") " pod="openshift-multus/multus-additional-cni-plugins-dt2mv" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.757136 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-host-run-netns\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.757364 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: 
\"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-host-run-k8s-cni-cncf-io\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.757424 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-host-run-netns\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.757466 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-multus-conf-dir\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.757489 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/0294dfed-64df-4d3c-92de-7a93787780a2-multus-daemon-config\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.757559 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-multus-conf-dir\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.762329 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/0294dfed-64df-4d3c-92de-7a93787780a2-multus-daemon-config\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.762376 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/c4a302c2-5f69-46d5-b4da-7e4306ea3a3d-os-release\") pod \"multus-additional-cni-plugins-dt2mv\" (UID: \"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\") " pod="openshift-multus/multus-additional-cni-plugins-dt2mv" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.762431 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/c4a302c2-5f69-46d5-b4da-7e4306ea3a3d-os-release\") pod \"multus-additional-cni-plugins-dt2mv\" (UID: \"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\") " pod="openshift-multus/multus-additional-cni-plugins-dt2mv" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.762453 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-multus-socket-dir-parent\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.762479 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c4a302c2-5f69-46d5-b4da-7e4306ea3a3d-system-cni-dir\") pod \"multus-additional-cni-plugins-dt2mv\" (UID: 
\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\") " pod="openshift-multus/multus-additional-cni-plugins-dt2mv" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.762496 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/c4a302c2-5f69-46d5-b4da-7e4306ea3a3d-cni-binary-copy\") pod \"multus-additional-cni-plugins-dt2mv\" (UID: \"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\") " pod="openshift-multus/multus-additional-cni-plugins-dt2mv" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.762512 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0294dfed-64df-4d3c-92de-7a93787780a2-cni-binary-copy\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.762526 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5zfx\" (UniqueName: \"kubernetes.io/projected/0294dfed-64df-4d3c-92de-7a93787780a2-kube-api-access-t5zfx\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.762542 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-hostroot\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.762567 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-system-cni-dir\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.762635 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-system-cni-dir\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.762669 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-multus-socket-dir-parent\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.762690 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/c4a302c2-5f69-46d5-b4da-7e4306ea3a3d-system-cni-dir\") pod \"multus-additional-cni-plugins-dt2mv\" (UID: \"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\") " pod="openshift-multus/multus-additional-cni-plugins-dt2mv" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.763128 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/c4a302c2-5f69-46d5-b4da-7e4306ea3a3d-cni-binary-copy\") pod \"multus-additional-cni-plugins-dt2mv\" (UID: \"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\") " pod="openshift-multus/multus-additional-cni-plugins-dt2mv" Jan 25 00:06:49 crc kubenswrapper[4985]: 
I0125 00:06:49.763492 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/0294dfed-64df-4d3c-92de-7a93787780a2-cni-binary-copy\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.763731 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/0294dfed-64df-4d3c-92de-7a93787780a2-hostroot\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.782377 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8zxg\" (UniqueName: \"kubernetes.io/projected/c4a302c2-5f69-46d5-b4da-7e4306ea3a3d-kube-api-access-n8zxg\") pod \"multus-additional-cni-plugins-dt2mv\" (UID: \"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\") " pod="openshift-multus/multus-additional-cni-plugins-dt2mv" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.786607 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5zfx\" (UniqueName: \"kubernetes.io/projected/0294dfed-64df-4d3c-92de-7a93787780a2-kube-api-access-t5zfx\") pod \"multus-4w9l7\" (UID: \"0294dfed-64df-4d3c-92de-7a93787780a2\") " pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.907909 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-4w9l7" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.912973 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" Jan 25 00:06:49 crc kubenswrapper[4985]: W0125 00:06:49.939484 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc4a302c2_5f69_46d5_b4da_7e4306ea3a3d.slice/crio-688267b5378504cc1e577903e90e74a68d19c3f94c903ed3ca6c5d95d113994a WatchSource:0}: Error finding container 688267b5378504cc1e577903e90e74a68d19c3f94c903ed3ca6c5d95d113994a: Status 404 returned error can't find the container with id 688267b5378504cc1e577903e90e74a68d19c3f94c903ed3ca6c5d95d113994a Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.956260 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-cc28q"] Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.957623 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.961320 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.961687 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.961693 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.961834 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.961948 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.962197 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 25 00:06:49 crc kubenswrapper[4985]: I0125 00:06:49.963997 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.008453 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.071067 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin 
routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"last
State\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 
127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.071736 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.071837 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.071866 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.071892 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-kubelet\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.071908 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/64cc3123-ba76-4365-86ae-c4cf7c09a805-ovnkube-script-lib\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.071927 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.071943 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-run-systemd\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.071957 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/64cc3123-ba76-4365-86ae-c4cf7c09a805-env-overrides\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.071973 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-run-ovn-kubernetes\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.071995 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-slash\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.072010 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-etc-openvswitch\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.072025 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/64cc3123-ba76-4365-86ae-c4cf7c09a805-ovnkube-config\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.072038 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-run-netns\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.072052 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-node-log\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.072068 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-run-ovn\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.072091 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.072127 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-log-socket\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.072143 4985 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-var-lib-openvswitch\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.072159 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-cni-bin\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.072177 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqbqw\" (UniqueName: \"kubernetes.io/projected/64cc3123-ba76-4365-86ae-c4cf7c09a805-kube-api-access-pqbqw\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.072192 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-run-openvswitch\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.072218 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-cni-netd\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.072234 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/64cc3123-ba76-4365-86ae-c4cf7c09a805-ovn-node-metrics-cert\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.072252 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.072268 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-systemd-units\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: E0125 00:06:50.072361 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-25 00:06:51.072348332 +0000 UTC m=+21.104284605 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:06:50 crc kubenswrapper[4985]: E0125 00:06:50.072404 4985 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 25 00:06:50 crc kubenswrapper[4985]: E0125 00:06:50.072427 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-25 00:06:51.072421104 +0000 UTC m=+21.104357377 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 25 00:06:50 crc kubenswrapper[4985]: E0125 00:06:50.072496 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 25 00:06:50 crc kubenswrapper[4985]: E0125 00:06:50.072507 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 25 00:06:50 crc kubenswrapper[4985]: E0125 00:06:50.072523 4985 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:06:50 crc kubenswrapper[4985]: E0125 00:06:50.072548 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-25 00:06:51.072541117 +0000 UTC m=+21.104477390 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:06:50 crc kubenswrapper[4985]: E0125 00:06:50.072615 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 25 00:06:50 crc kubenswrapper[4985]: E0125 00:06:50.072625 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 25 00:06:50 crc kubenswrapper[4985]: E0125 00:06:50.072631 4985 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:06:50 crc kubenswrapper[4985]: E0125 00:06:50.072649 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-25 00:06:51.07264407 +0000 UTC m=+21.104580343 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:06:50 crc kubenswrapper[4985]: E0125 00:06:50.072716 4985 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 25 00:06:50 crc kubenswrapper[4985]: E0125 00:06:50.072737 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-25 00:06:51.072731702 +0000 UTC m=+21.104667975 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.119023 4985 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.119320 4985 reflector.go:484] object-"openshift-multus"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-multus"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.119357 4985 reflector.go:484] object-"openshift-network-node-identity"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.119383 4985 reflector.go:484] object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq": watch of *v1.Secret ended with: very short watch: object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.119407 4985 reflector.go:484] object-"openshift-network-node-identity"/"ovnkube-identity-cm": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"ovnkube-identity-cm": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.119429 4985 reflector.go:484] object-"openshift-machine-config-operator"/"kube-rbac-proxy": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-machine-config-operator"/"kube-rbac-proxy": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.119453 4985 reflector.go:484] object-"openshift-dns"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-dns"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.120218 4985 reflector.go:484] object-"openshift-multus"/"multus-daemon-config": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-multus"/"multus-daemon-config": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.120260 4985 reflector.go:484] object-"openshift-network-node-identity"/"env-overrides": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"env-overrides": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.120291 4985 reflector.go:484] object-"openshift-network-operator"/"iptables-alerter-script": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-operator"/"iptables-alerter-script": Unexpected watch close - watch 
lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.120320 4985 reflector.go:484] object-"openshift-network-operator"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-operator"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.120520 4985 reflector.go:484] object-"openshift-multus"/"default-dockercfg-2q5b6": watch of *v1.Secret ended with: very short watch: object-"openshift-multus"/"default-dockercfg-2q5b6": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.120549 4985 reflector.go:484] object-"openshift-dns"/"node-resolver-dockercfg-kz9s7": watch of *v1.Secret ended with: very short watch: object-"openshift-dns"/"node-resolver-dockercfg-kz9s7": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.120578 4985 reflector.go:484] object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz": watch of *v1.Secret ended with: very short watch: object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.120779 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25
T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Patch \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc/status\": read tcp 38.102.83.196:50548->38.102.83.196:6443: use of closed network connection" Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.121434 4985 reflector.go:484] object-"openshift-ovn-kubernetes"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.121460 4985 reflector.go:484] object-"openshift-network-node-identity"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-node-identity"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.121449 4985 reflector.go:484] object-"openshift-multus"/"cni-copy-resources": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-multus"/"cni-copy-resources": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.121508 4985 reflector.go:484] object-"openshift-machine-config-operator"/"proxy-tls": watch of *v1.Secret ended with: very short 
watch: object-"openshift-machine-config-operator"/"proxy-tls": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.121532 4985 reflector.go:484] object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.121538 4985 reflector.go:484] object-"openshift-multus"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-multus"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.121564 4985 reflector.go:484] object-"openshift-network-operator"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-network-operator"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.121599 4985 reflector.go:484] object-"openshift-machine-config-operator"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-machine-config-operator"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.121623 4985 reflector.go:484] object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl": watch of *v1.Secret ended with: very short watch: object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.121657 4985 reflector.go:484] object-"openshift-ovn-kubernetes"/"ovnkube-config": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"ovnkube-config": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.121680 4985 reflector.go:484] object-"openshift-multus"/"default-cni-sysctl-allowlist": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-multus"/"default-cni-sysctl-allowlist": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: E0125 00:06:50.121589 4985 event.go:368] "Unable to write event (may retry after sleeping)" err="Patch \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-diagnostics/events/network-check-target-xd92c.188dd09ee75bfd9b\": read tcp 38.102.83.196:50548->38.102.83.196:6443: use of closed network connection" event="&Event{ObjectMeta:{network-check-target-xd92c.188dd09ee75bfd9b openshift-network-diagnostics 26515 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-network-diagnostics,Name:network-check-target-xd92c,UID:3b6479f0-333b-4a96-9adf-2099afdc2447,APIVersion:v1,ResourceVersion:25004,FieldPath:,},Reason:FailedMount,Message:MountVolume.SetUp failed for volume \"kube-api-access-cqllr\" : [object \"openshift-network-diagnostics\"/\"kube-root-ca.crt\" not registered, object \"openshift-network-diagnostics\"/\"openshift-service-ca.crt\" not 
registered],Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-25 00:06:49 +0000 UTC,LastTimestamp:2026-01-25 00:06:50.07264229 +0000 UTC m=+20.104578563,Count:2,Type:Warning,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.121683 4985 reflector.go:484] object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert": watch of *v1.Secret ended with: very short watch: object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.121705 4985 reflector.go:484] object-"openshift-network-node-identity"/"network-node-identity-cert": watch of *v1.Secret ended with: very short watch: object-"openshift-network-node-identity"/"network-node-identity-cert": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.121712 4985 reflector.go:484] object-"openshift-ovn-kubernetes"/"ovnkube-script-lib": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"ovnkube-script-lib": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.121730 4985 reflector.go:484] object-"openshift-ovn-kubernetes"/"env-overrides": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"env-overrides": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.121736 4985 reflector.go:484] object-"openshift-machine-config-operator"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-machine-config-operator"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.121492 4985 reflector.go:484] object-"openshift-network-operator"/"metrics-tls": watch of *v1.Secret ended with: very short watch: object-"openshift-network-operator"/"metrics-tls": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.121778 4985 reflector.go:484] object-"openshift-dns"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-dns"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.153052 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.155275 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.166378 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.166390 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.174721 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.174768 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-kubelet\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.174788 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/64cc3123-ba76-4365-86ae-c4cf7c09a805-ovnkube-script-lib\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.174807 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-run-systemd\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.174822 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/64cc3123-ba76-4365-86ae-c4cf7c09a805-env-overrides\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.174850 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-run-ovn-kubernetes\") 
pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.174865 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-slash\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.174879 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-etc-openvswitch\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.174896 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/64cc3123-ba76-4365-86ae-c4cf7c09a805-ovnkube-config\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.174916 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-run-netns\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.174930 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-node-log\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.174943 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-run-ovn\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.174957 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-log-socket\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.174980 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-var-lib-openvswitch\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.174993 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-cni-bin\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 
25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.175009 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqbqw\" (UniqueName: \"kubernetes.io/projected/64cc3123-ba76-4365-86ae-c4cf7c09a805-kube-api-access-pqbqw\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.175026 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-run-openvswitch\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.175039 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-cni-netd\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.175052 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/64cc3123-ba76-4365-86ae-c4cf7c09a805-ovn-node-metrics-cert\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.175068 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-systemd-units\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.175146 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-systemd-units\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.175187 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.175209 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-kubelet\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.175426 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-node-log\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.175479 4985 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-run-systemd\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.175818 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/64cc3123-ba76-4365-86ae-c4cf7c09a805-ovnkube-script-lib\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.175859 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-run-ovn\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.175860 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/64cc3123-ba76-4365-86ae-c4cf7c09a805-env-overrides\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.175883 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-run-ovn-kubernetes\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.175896 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-log-socket\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.175912 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-slash\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.175921 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-var-lib-openvswitch\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.175936 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-etc-openvswitch\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.175944 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-cni-bin\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.176258 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-run-netns\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.176273 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.176331 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-run-openvswitch\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.176313 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-cni-netd\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.176389 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/64cc3123-ba76-4365-86ae-c4cf7c09a805-ovnkube-config\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.183308 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/64cc3123-ba76-4365-86ae-c4cf7c09a805-ovn-node-metrics-cert\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.191014 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqbqw\" (UniqueName: \"kubernetes.io/projected/64cc3123-ba76-4365-86ae-c4cf7c09a805-kube-api-access-pqbqw\") pod \"ovnkube-node-cc28q\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.198954 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.210888 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.234961 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.243564 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.258344 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.271571 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.277935 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.278671 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.279340 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.279948 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.280545 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.281042 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.281699 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.282224 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.282868 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.284513 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.284839 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.285389 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.286439 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.286951 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.287445 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.290719 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" 
path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.291276 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.292359 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.292807 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.293892 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.293877 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.294862 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.295340 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.296259 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.296703 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.297726 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.298213 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.298784 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.299828 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.300299 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.301503 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Jan 25 00:06:50 crc 
kubenswrapper[4985]: I0125 00:06:50.301991 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.301963 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 15:32:31.46750575 +0000 UTC Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.302634 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.302824 4985 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.302926 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.304492 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.305366 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.305829 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.307231 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.307861 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.308801 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.309506 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.310558 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" 
path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.310985 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.312001 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.312586 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.313574 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.314001 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.314843 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.315378 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.316418 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.316884 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.317702 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.318148 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.319000 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.319537 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.319971 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" 
path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.326772 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.346998 4985 util.go:30] "No sandbox for pod can 
be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.356780 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver
-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: W0125 00:06:50.359791 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod64cc3123_ba76_4365_86ae_c4cf7c09a805.slice/crio-a3105f031b5159130a4c5d5b1210ece101c40faf3453dd470fc723525955aa14 WatchSource:0}: Error finding container a3105f031b5159130a4c5d5b1210ece101c40faf3453dd470fc723525955aa14: Status 404 returned error can't find the container with id a3105f031b5159130a4c5d5b1210ece101c40faf3453dd470fc723525955aa14 Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.386469 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.397133 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4w9l7" event={"ID":"0294dfed-64df-4d3c-92de-7a93787780a2","Type":"ContainerStarted","Data":"2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473"} Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.397187 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4w9l7" event={"ID":"0294dfed-64df-4d3c-92de-7a93787780a2","Type":"ContainerStarted","Data":"434f94871a4bbbd0a76e77618ff100437633fe939fad57fce3770bb0b94cc47e"} Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.407237 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-xzbbh" event={"ID":"d1879c0f-3576-4f5a-9ac2-ada68270b8da","Type":"ContainerStarted","Data":"abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb"} Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.407271 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-xzbbh" event={"ID":"d1879c0f-3576-4f5a-9ac2-ada68270b8da","Type":"ContainerStarted","Data":"f0916656b4a155c2b4a145874b9fb26655ad365de4570ec67f5156b5f4ced20a"} Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.416344 4985 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.420833 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339"} Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.420870 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"ab3105758e59c7203feb7eb14aafb6979c561e67ac7d4b682757f55cd5493de4"} Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.431281 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerStarted","Data":"a3105f031b5159130a4c5d5b1210ece101c40faf3453dd470fc723525955aa14"} Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.434634 4985 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"e7de8f9511bc05bf6caaa8b59c95c1f38be68f972cfedc689389a206326efd12"} Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.435789 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.438005 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.439804 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178"} Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.440149 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.442223 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" event={"ID":"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d","Type":"ContainerStarted","Data":"99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11"} Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.442252 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" event={"ID":"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d","Type":"ContainerStarted","Data":"688267b5378504cc1e577903e90e74a68d19c3f94c903ed3ca6c5d95d113994a"} Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.443798 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" event={"ID":"5fa83abe-5c61-40a5-bf77-d8f929bdda78","Type":"ContainerStarted","Data":"a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d"} Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.443820 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" event={"ID":"5fa83abe-5c61-40a5-bf77-d8f929bdda78","Type":"ContainerStarted","Data":"03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585"} Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.443829 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" event={"ID":"5fa83abe-5c61-40a5-bf77-d8f929bdda78","Type":"ContainerStarted","Data":"46c3fcea96b23f4f0e76bc3705f30af93194f129f983bcef8a40b50c1f8d611c"} Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.445445 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d"} Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.445498 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" 
event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8"} Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.445511 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"3b2722e6cd3fbb65185f13d2953ebd6953bd4c45dfc0fe9647c160f4a75aa973"} Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.456943 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.465236 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.474029 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.494799 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.514787 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.523389 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.537842 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.547688 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.561772 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.571946 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.583183 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.606062 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.617174 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.633263 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
,{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.644348 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.655162 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.666066 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.677746 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"
Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\
"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.688196 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.699082 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current 
time 2026-01-25T00:06:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.709534 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.721088 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.729865 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.740840 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.753199 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"
Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\
"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.766674 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.780875 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.800950 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.815094 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.840907 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:50 crc kubenswrapper[4985]: I0125 00:06:50.873098 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.013804 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.030254 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.030592 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.043233 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.059207 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.085430 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.086538 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.087513 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:06:51 crc kubenswrapper[4985]: E0125 00:06:51.087654 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b 
nodeName:}" failed. No retries permitted until 2026-01-25 00:06:53.087634681 +0000 UTC m=+23.119570974 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.087684 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.087723 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.087751 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.087779 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:06:51 crc kubenswrapper[4985]: E0125 00:06:51.087894 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 25 00:06:51 crc kubenswrapper[4985]: E0125 00:06:51.087914 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 25 00:06:51 crc kubenswrapper[4985]: E0125 00:06:51.087928 4985 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:06:51 crc kubenswrapper[4985]: E0125 00:06:51.087953 4985 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 25 00:06:51 crc kubenswrapper[4985]: E0125 00:06:51.087972 4985 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-25 00:06:53.08796275 +0000 UTC m=+23.119899033 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:06:51 crc kubenswrapper[4985]: E0125 00:06:51.088030 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-25 00:06:53.088007271 +0000 UTC m=+23.119943614 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 25 00:06:51 crc kubenswrapper[4985]: E0125 00:06:51.088028 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 25 00:06:51 crc kubenswrapper[4985]: E0125 00:06:51.088060 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 25 00:06:51 crc kubenswrapper[4985]: E0125 00:06:51.088072 4985 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:06:51 crc kubenswrapper[4985]: E0125 00:06:51.088143 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-25 00:06:53.088126274 +0000 UTC m=+23.120062547 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:06:51 crc kubenswrapper[4985]: E0125 00:06:51.087902 4985 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 25 00:06:51 crc kubenswrapper[4985]: E0125 00:06:51.088316 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2026-01-25 00:06:53.088305149 +0000 UTC m=+23.120241422 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.091301 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.109574 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.176550 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.182289 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.199571 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.201058 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.215290 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.235883 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.251564 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.269022 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.273164 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.274376 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.274467 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:06:51 crc kubenswrapper[4985]: E0125 00:06:51.274641 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.274683 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:06:51 crc kubenswrapper[4985]: E0125 00:06:51.274731 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:06:51 crc kubenswrapper[4985]: E0125 00:06:51.274815 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.302669 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 16:06:30.001523736 +0000 UTC Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.310213 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.313746 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.339761 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.339899 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.403038 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.430529 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.441765 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.450215 4985 generic.go:334] "Generic (PLEG): container finished" podID="c4a302c2-5f69-46d5-b4da-7e4306ea3a3d" containerID="99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11" exitCode=0 Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.450338 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" event={"ID":"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d","Type":"ContainerDied","Data":"99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11"} Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.452092 4985 generic.go:334] "Generic (PLEG): container finished" podID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerID="7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d" exitCode=0 Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.452332 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerDied","Data":"7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d"} Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.485774 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc2
76e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:51Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.508765 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:51Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.528255 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:51Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.549173 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.574742 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:51Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.607366 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.611201 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:51Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:51 crc 
kubenswrapper[4985]: I0125 00:06:51.662243 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.662490 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92
edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:51Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.682282 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.722464 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.748008 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.770606 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:51Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.813015 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:51Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.855262 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:51Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.889929 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:51Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.935946 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:51Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:51 crc kubenswrapper[4985]: I0125 00:06:51.970921 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:51Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.013717 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:52Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.049676 4985 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:52Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.093504 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:52Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.133982 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:52Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.179893 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host
/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:52Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.210529 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:52Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.249126 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:52Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.294473 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:52Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.303457 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 20:39:28.57875807 +0000 UTC Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.337252 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:52Z 
is after 2025-08-24T17:21:41Z" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.372165 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restar
tCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:52Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.414081 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:52Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.454841 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:52Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.460456 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" 
event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerStarted","Data":"dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714"} Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.460498 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerStarted","Data":"eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1"} Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.460518 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerStarted","Data":"6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff"} Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.460529 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerStarted","Data":"2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3"} Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.460539 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerStarted","Data":"dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67"} Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.460549 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerStarted","Data":"c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398"} Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.462736 4985 generic.go:334] "Generic (PLEG): container finished" podID="c4a302c2-5f69-46d5-b4da-7e4306ea3a3d" containerID="b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c" exitCode=0 Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.462769 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" event={"ID":"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d","Type":"ContainerDied","Data":"b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c"} Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.495606 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:52Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.533215 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:52Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.572571 4985 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:52Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.611947 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:52Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.649960 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:52Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.677591 4985 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openshift-image-registry/node-ca-fcpqg"] Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.677965 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-fcpqg" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.690811 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:52Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.702268 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.721744 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.742139 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.761493 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.806492 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" 
(UniqueName: \"kubernetes.io/configmap/e6a1e426-cc25-4015-ab79-402c7eecfafa-serviceca\") pod \"node-ca-fcpqg\" (UID: \"e6a1e426-cc25-4015-ab79-402c7eecfafa\") " pod="openshift-image-registry/node-ca-fcpqg" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.806596 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pb7b5\" (UniqueName: \"kubernetes.io/projected/e6a1e426-cc25-4015-ab79-402c7eecfafa-kube-api-access-pb7b5\") pod \"node-ca-fcpqg\" (UID: \"e6a1e426-cc25-4015-ab79-402c7eecfafa\") " pod="openshift-image-registry/node-ca-fcpqg" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.806642 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e6a1e426-cc25-4015-ab79-402c7eecfafa-host\") pod \"node-ca-fcpqg\" (UID: \"e6a1e426-cc25-4015-ab79-402c7eecfafa\") " pod="openshift-image-registry/node-ca-fcpqg" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.811090 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-25T00:06:52Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.859508 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:52Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.898728 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-
25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:52Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.907397 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e6a1e426-cc25-4015-ab79-402c7eecfafa-host\") pod \"node-ca-fcpqg\" (UID: \"e6a1e426-cc25-4015-ab79-402c7eecfafa\") " pod="openshift-image-registry/node-ca-fcpqg" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.907445 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e6a1e426-cc25-4015-ab79-402c7eecfafa-serviceca\") pod \"node-ca-fcpqg\" (UID: \"e6a1e426-cc25-4015-ab79-402c7eecfafa\") " pod="openshift-image-registry/node-ca-fcpqg" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.907492 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pb7b5\" (UniqueName: \"kubernetes.io/projected/e6a1e426-cc25-4015-ab79-402c7eecfafa-kube-api-access-pb7b5\") pod \"node-ca-fcpqg\" (UID: \"e6a1e426-cc25-4015-ab79-402c7eecfafa\") " pod="openshift-image-registry/node-ca-fcpqg" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.907643 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e6a1e426-cc25-4015-ab79-402c7eecfafa-host\") pod \"node-ca-fcpqg\" (UID: \"e6a1e426-cc25-4015-ab79-402c7eecfafa\") " pod="openshift-image-registry/node-ca-fcpqg" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.910540 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/e6a1e426-cc25-4015-ab79-402c7eecfafa-serviceca\") pod \"node-ca-fcpqg\" (UID: \"e6a1e426-cc25-4015-ab79-402c7eecfafa\") " pod="openshift-image-registry/node-ca-fcpqg" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.930841 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:52Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.960217 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pb7b5\" (UniqueName: \"kubernetes.io/projected/e6a1e426-cc25-4015-ab79-402c7eecfafa-kube-api-access-pb7b5\") pod \"node-ca-fcpqg\" (UID: \"e6a1e426-cc25-4015-ab79-402c7eecfafa\") " pod="openshift-image-registry/node-ca-fcpqg" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.990026 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:52Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:52 crc kubenswrapper[4985]: I0125 00:06:52.992505 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-fcpqg" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.040898 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.076273 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.098772 4985 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.100999 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.101050 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.101064 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.101189 4985 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.108942 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.109071 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:06:53 crc kubenswrapper[4985]: E0125 00:06:53.109174 4985 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 25 00:06:53 crc kubenswrapper[4985]: E0125 00:06:53.109490 4985 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:06:57.109124072 +0000 UTC m=+27.141060345 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:06:53 crc kubenswrapper[4985]: E0125 00:06:53.109527 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-25 00:06:57.109519952 +0000 UTC m=+27.141456225 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.109556 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.109607 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.109647 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:06:53 crc kubenswrapper[4985]: E0125 00:06:53.109676 4985 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 25 00:06:53 crc kubenswrapper[4985]: E0125 00:06:53.109732 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-25 00:06:57.109716237 +0000 UTC m=+27.141652510 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 25 00:06:53 crc kubenswrapper[4985]: E0125 00:06:53.109785 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 25 00:06:53 crc kubenswrapper[4985]: E0125 00:06:53.109803 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 25 00:06:53 crc kubenswrapper[4985]: E0125 00:06:53.109808 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 25 00:06:53 crc kubenswrapper[4985]: E0125 00:06:53.109816 4985 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:06:53 crc kubenswrapper[4985]: E0125 00:06:53.109821 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 25 00:06:53 crc kubenswrapper[4985]: E0125 00:06:53.109833 4985 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:06:53 crc kubenswrapper[4985]: E0125 00:06:53.109864 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-25 00:06:57.109848751 +0000 UTC m=+27.141785034 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:06:53 crc kubenswrapper[4985]: E0125 00:06:53.109882 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-25 00:06:57.109874782 +0000 UTC m=+27.141811055 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.112648 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z 
is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.162369 4985 kubelet_node_status.go:115] "Node was previously registered" node="crc" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.162613 4985 kubelet_node_status.go:79] "Successfully registered node" node="crc" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.163569 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.163585 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.163593 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.163605 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.163613 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:53Z","lastTransitionTime":"2026-01-25T00:06:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:53 crc kubenswrapper[4985]: E0125 00:06:53.180656 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.183900 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.183939 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.183949 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.183964 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.183974 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:53Z","lastTransitionTime":"2026-01-25T00:06:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.188917 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: E0125 00:06:53.194024 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.197172 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.197199 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.197208 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.197222 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.197232 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:53Z","lastTransitionTime":"2026-01-25T00:06:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:53 crc kubenswrapper[4985]: E0125 00:06:53.208379 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.216962 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.217037 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.217065 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.217096 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.217149 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:53Z","lastTransitionTime":"2026-01-25T00:06:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.229308 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: E0125 00:06:53.232654 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.236869 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.236905 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.236915 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.236930 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.236941 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:53Z","lastTransitionTime":"2026-01-25T00:06:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:53 crc kubenswrapper[4985]: E0125 00:06:53.248036 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: E0125 00:06:53.248214 4985 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.249893 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.249936 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.249957 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.249984 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.250003 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:53Z","lastTransitionTime":"2026-01-25T00:06:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.272233 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.274277 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.274324 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:06:53 crc kubenswrapper[4985]: E0125 00:06:53.274416 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.274440 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:06:53 crc kubenswrapper[4985]: E0125 00:06:53.274493 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:06:53 crc kubenswrapper[4985]: E0125 00:06:53.274538 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.304455 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 15:08:10.678684393 +0000 UTC Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.310758 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.350879 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.352569 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.352730 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.352860 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.352958 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.353022 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:53Z","lastTransitionTime":"2026-01-25T00:06:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.392719 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.435977 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.455321 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.455371 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.455389 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.455414 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.455432 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:53Z","lastTransitionTime":"2026-01-25T00:06:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.468039 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-fcpqg" event={"ID":"e6a1e426-cc25-4015-ab79-402c7eecfafa","Type":"ContainerStarted","Data":"aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18"} Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.468119 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-fcpqg" event={"ID":"e6a1e426-cc25-4015-ab79-402c7eecfafa","Type":"ContainerStarted","Data":"d53b9aef95c8db9a037f0ab9bc3b4652aec796142b4ce5749b24b5fb112f2cf1"} Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.470461 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f"} Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.475375 4985 generic.go:334] "Generic (PLEG): container finished" podID="c4a302c2-5f69-46d5-b4da-7e4306ea3a3d" containerID="92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac" exitCode=0 Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.475452 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" event={"ID":"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d","Type":"ContainerDied","Data":"92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac"} Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.480830 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\
\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.518013 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/c
ni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.556312 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-
25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.557939 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.557983 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.557995 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.558013 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.558025 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:53Z","lastTransitionTime":"2026-01-25T00:06:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.593540 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\"
,\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.635267 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.661052 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.661130 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.661143 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.661175 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.661191 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:53Z","lastTransitionTime":"2026-01-25T00:06:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.673954 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.747405 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts
\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host
-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168
.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.761554 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.763339 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.763387 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:53 
crc kubenswrapper[4985]: I0125 00:06:53.763397 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.763411 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.763420 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:53Z","lastTransitionTime":"2026-01-25T00:06:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.797198 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\
\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.838283 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.865710 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.865749 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.865758 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.865771 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.865779 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:53Z","lastTransitionTime":"2026-01-25T00:06:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.875066 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.913841 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.954152 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.968964 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.968999 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.969012 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.969043 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.969057 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:53Z","lastTransitionTime":"2026-01-25T00:06:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:53 crc kubenswrapper[4985]: I0125 00:06:53.993914 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:53Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.045492 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:54Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.071635 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.071682 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.071716 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.071741 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.071759 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:54Z","lastTransitionTime":"2026-01-25T00:06:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.074585 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:54Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.119283 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-contr
oller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:54Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.157477 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:54Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.177379 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.177417 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.177428 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.177446 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.177460 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:54Z","lastTransitionTime":"2026-01-25T00:06:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.203654 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:54Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.235943 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:54Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.274825 4985 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:54Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.280533 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.280601 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.280627 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.280657 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.280683 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:54Z","lastTransitionTime":"2026-01-25T00:06:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.305404 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 13:53:40.269446352 +0000 UTC Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.318570 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:54Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.383515 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.383562 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.383579 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.383599 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.383612 4985 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:54Z","lastTransitionTime":"2026-01-25T00:06:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.483244 4985 generic.go:334] "Generic (PLEG): container finished" podID="c4a302c2-5f69-46d5-b4da-7e4306ea3a3d" containerID="ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605" exitCode=0 Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.483354 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" event={"ID":"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d","Type":"ContainerDied","Data":"ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605"} Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.486398 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.486444 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.486463 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.486489 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.486506 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:54Z","lastTransitionTime":"2026-01-25T00:06:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.491837 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerStarted","Data":"94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb"} Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.531371 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:54Z 
is after 2025-08-24T17:21:41Z" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.550842 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:54Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.570522 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"message\\\":\\\"containers with unready status: 
[cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-contr
oller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:54Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.591164 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.591245 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.591272 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.591305 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.591329 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:54Z","lastTransitionTime":"2026-01-25T00:06:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.591519 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:54Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.611057 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:54Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.624944 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:54Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.643937 4985 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:54Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.667328 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:54Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.697045 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:54Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.699475 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.699530 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.699549 4985 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.699574 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.699593 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:54Z","lastTransitionTime":"2026-01-25T00:06:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.719239 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:54Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.757969 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:54Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.793685 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:54Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.803691 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.803754 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.803775 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.803801 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.803822 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:54Z","lastTransitionTime":"2026-01-25T00:06:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.828093 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:54Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.875586 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:54Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.906445 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.906505 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.906521 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.906544 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:54 crc kubenswrapper[4985]: I0125 00:06:54.906560 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:54Z","lastTransitionTime":"2026-01-25T00:06:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.009191 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.009235 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.009247 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.009264 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.009276 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:55Z","lastTransitionTime":"2026-01-25T00:06:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.114294 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.114341 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.114353 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.114371 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.114383 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:55Z","lastTransitionTime":"2026-01-25T00:06:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.216533 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.216760 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.216830 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.216897 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.216960 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:55Z","lastTransitionTime":"2026-01-25T00:06:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.274150 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:06:55 crc kubenswrapper[4985]: E0125 00:06:55.275229 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.274194 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:06:55 crc kubenswrapper[4985]: E0125 00:06:55.275495 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.274164 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:06:55 crc kubenswrapper[4985]: E0125 00:06:55.275709 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.306343 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 05:18:04.627762232 +0000 UTC Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.323087 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.323138 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.323151 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.323166 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.323177 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:55Z","lastTransitionTime":"2026-01-25T00:06:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.424947 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.424997 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.425011 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.425027 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.425039 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:55Z","lastTransitionTime":"2026-01-25T00:06:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.426544 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.432030 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.442062 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.453261 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.468282 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.489646 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},
{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.502010 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" event={"ID":"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d","Type":"ContainerStarted","Data":"42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0"} Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.509212 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.522761 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.527523 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.527582 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.527592 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.527606 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.527615 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:55Z","lastTransitionTime":"2026-01-25T00:06:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.536685 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.553465 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts
\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host
-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168
.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.562728 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.580280 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"message\\\":\\\"containers with unready status: [cluster-policy-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\
":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.592618 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.604539 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.617770 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.627512 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.629956 4985 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.630006 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.630021 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.630042 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.630055 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:55Z","lastTransitionTime":"2026-01-25T00:06:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.640433 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.651820 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.661811 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.687598 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.714296 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},
{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.723834 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.733065 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.733120 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.733133 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.733146 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.733154 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:55Z","lastTransitionTime":"2026-01-25T00:06:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.737113 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.749446 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.794916 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z 
is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.831919 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.836162 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.836245 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.836274 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.836347 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.836377 4985 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:55Z","lastTransitionTime":"2026-01-25T00:06:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.876463 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.914172 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.938326 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.938385 4985 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.938401 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.938428 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.938444 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:55Z","lastTransitionTime":"2026-01-25T00:06:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.951756 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-
config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:55 crc kubenswrapper[4985]: I0125 00:06:55.991489 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"ku
be-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:55Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.041291 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.041413 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.041440 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.041473 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.041496 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:56Z","lastTransitionTime":"2026-01-25T00:06:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.144185 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.144237 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.144249 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.144265 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.144278 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:56Z","lastTransitionTime":"2026-01-25T00:06:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.247441 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.247498 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.247535 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.247564 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.247587 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:56Z","lastTransitionTime":"2026-01-25T00:06:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.307508 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 23:50:40.853582688 +0000 UTC Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.351518 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.351587 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.351605 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.351628 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.351647 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:56Z","lastTransitionTime":"2026-01-25T00:06:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.455651 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.456385 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.456534 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.456696 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.456827 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:56Z","lastTransitionTime":"2026-01-25T00:06:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.511760 4985 generic.go:334] "Generic (PLEG): container finished" podID="c4a302c2-5f69-46d5-b4da-7e4306ea3a3d" containerID="42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0" exitCode=0 Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.511852 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" event={"ID":"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d","Type":"ContainerDied","Data":"42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0"} Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.541384 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:56Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.559061 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.559139 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.559156 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.559179 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.559194 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:56Z","lastTransitionTime":"2026-01-25T00:06:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.570484 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:56Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.587358 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:56Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.605774 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:56Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.621311 4985 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:56Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.634366 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:56Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.647788 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:56Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.664823 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/
host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:56Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.665952 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:56 
crc kubenswrapper[4985]: I0125 00:06:56.665985 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.665999 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.666016 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.666028 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:56Z","lastTransitionTime":"2026-01-25T00:06:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.681630 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:56Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.698799 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:56Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.712743 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:56Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.732959 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:56Z 
is after 2025-08-24T17:21:41Z" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.741773 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:56Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.752321 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:56Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.768795 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.768854 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.768870 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.768894 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.768911 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:56Z","lastTransitionTime":"2026-01-25T00:06:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.872010 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.872087 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.872167 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.872202 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.872224 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:56Z","lastTransitionTime":"2026-01-25T00:06:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.975952 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.975998 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.976017 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.976037 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:56 crc kubenswrapper[4985]: I0125 00:06:56.976051 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:56Z","lastTransitionTime":"2026-01-25T00:06:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.079171 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.079239 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.079259 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.079287 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.079306 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:57Z","lastTransitionTime":"2026-01-25T00:06:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.164154 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.164353 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:06:57 crc kubenswrapper[4985]: E0125 00:06:57.164409 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:07:05.164371676 +0000 UTC m=+35.196307979 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.164484 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:06:57 crc kubenswrapper[4985]: E0125 00:06:57.164521 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 25 00:06:57 crc kubenswrapper[4985]: E0125 00:06:57.164550 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.164553 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:06:57 crc kubenswrapper[4985]: E0125 00:06:57.164570 4985 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.164617 4985 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:06:57 crc kubenswrapper[4985]: E0125 00:06:57.164638 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-25 00:07:05.164615193 +0000 UTC m=+35.196551496 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:06:57 crc kubenswrapper[4985]: E0125 00:06:57.164705 4985 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 25 00:06:57 crc kubenswrapper[4985]: E0125 00:06:57.164712 4985 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 25 00:06:57 crc kubenswrapper[4985]: E0125 00:06:57.164746 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-25 00:07:05.164732316 +0000 UTC m=+35.196668629 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 25 00:06:57 crc kubenswrapper[4985]: E0125 00:06:57.164798 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-25 00:07:05.164771597 +0000 UTC m=+35.196707970 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 25 00:06:57 crc kubenswrapper[4985]: E0125 00:06:57.164809 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 25 00:06:57 crc kubenswrapper[4985]: E0125 00:06:57.164833 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 25 00:06:57 crc kubenswrapper[4985]: E0125 00:06:57.164853 4985 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:06:57 crc kubenswrapper[4985]: E0125 00:06:57.164920 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-25 00:07:05.164905731 +0000 UTC m=+35.196842044 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.182275 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.182332 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.182349 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.182370 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.182388 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:57Z","lastTransitionTime":"2026-01-25T00:06:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.274432 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.274435 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.274470 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:06:57 crc kubenswrapper[4985]: E0125 00:06:57.274692 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:06:57 crc kubenswrapper[4985]: E0125 00:06:57.274788 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:06:57 crc kubenswrapper[4985]: E0125 00:06:57.274929 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.285956 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.286019 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.286039 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.286065 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.286082 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:57Z","lastTransitionTime":"2026-01-25T00:06:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.308327 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 19:39:03.245626869 +0000 UTC Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.389664 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.389722 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.389737 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.389758 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.389774 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:57Z","lastTransitionTime":"2026-01-25T00:06:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.492975 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.493039 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.493066 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.493094 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.493155 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:57Z","lastTransitionTime":"2026-01-25T00:06:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.519470 4985 generic.go:334] "Generic (PLEG): container finished" podID="c4a302c2-5f69-46d5-b4da-7e4306ea3a3d" containerID="eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd" exitCode=0 Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.519541 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" event={"ID":"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d","Type":"ContainerDied","Data":"eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd"} Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.529758 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerStarted","Data":"9c03d188441c67f8e3a703e6bf02dc9cda6f2bfafb9a519a4eacd40624b3834b"} Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.530215 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.530248 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.547723 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.562955 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.567701 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.584001 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.597976 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.598027 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.598045 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.598069 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.598091 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:57Z","lastTransitionTime":"2026-01-25T00:06:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.603124 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.624537 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.639791 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.652690 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.667760 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.694276 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z 
is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.701047 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.701082 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.701093 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.701130 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.701145 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:57Z","lastTransitionTime":"2026-01-25T00:06:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.708087 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-
25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.719520 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.731702 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.742891 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.754065 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.767780 4985 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f38
0e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.779149 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.794807 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.803050 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.803097 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.803134 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.803150 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.803160 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:57Z","lastTransitionTime":"2026-01-25T00:06:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.816135 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"cont
ainerID\\\":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c03d188441c67f8e3a703e6bf02dc9cda6f2bfafb9a519a4eacd40624b3834b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkub
e-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.838940 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.870893 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.885771 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.897809 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 
00:06:57.905135 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.905161 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.905169 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.905181 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.905190 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:57Z","lastTransitionTime":"2026-01-25T00:06:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.907300 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io
/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.920822 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.930786 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.939619 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.949752 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:57 crc kubenswrapper[4985]: I0125 00:06:57.962150 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\
"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b674
35222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:57Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.007767 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.007811 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.007823 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.007838 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.007849 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:58Z","lastTransitionTime":"2026-01-25T00:06:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.110483 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.110563 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.110579 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.110604 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.110624 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:58Z","lastTransitionTime":"2026-01-25T00:06:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.213061 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.213160 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.213174 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.213192 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.213232 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:58Z","lastTransitionTime":"2026-01-25T00:06:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.308661 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 01:50:46.88435046 +0000 UTC Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.316199 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.316296 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.316361 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.316384 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.316447 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:58Z","lastTransitionTime":"2026-01-25T00:06:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.419439 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.419477 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.419486 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.419502 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.419512 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:58Z","lastTransitionTime":"2026-01-25T00:06:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.521753 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.521817 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.521837 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.521863 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.521882 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:58Z","lastTransitionTime":"2026-01-25T00:06:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.535181 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" event={"ID":"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d","Type":"ContainerStarted","Data":"f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e"} Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.535527 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.549456 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.556982 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.560975 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.573790 4985 status_manager.go:875] "Failed 
to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.583530 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.594228 4985 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.604149 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.616699 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.624504 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.624541 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.624552 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.624568 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.624580 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:58Z","lastTransitionTime":"2026-01-25T00:06:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.630579 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:
06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.647700 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.666211 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.686586 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.717955 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c03d188441c67f8e3a703e6bf02dc9cda6f2bfa
fb9a519a4eacd40624b3834b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.726615 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.726657 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.726669 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.726684 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.726695 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:58Z","lastTransitionTime":"2026-01-25T00:06:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.732765 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.750467 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.765858 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift
-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.781345 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.794906 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.816090 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c03d188441c67f8e3a703e6bf02dc9cda6f2bfa
fb9a519a4eacd40624b3834b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.828938 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.828993 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.829013 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.829038 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.829056 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:58Z","lastTransitionTime":"2026-01-25T00:06:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.833608 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.850238 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.867562 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.885371 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.902358 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.917491 4985 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.931977 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.932041 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.932064 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.932088 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.932142 4985 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:58Z","lastTransitionTime":"2026-01-25T00:06:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.934572 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.948937 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.966240 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:58 crc kubenswrapper[4985]: I0125 00:06:58.985655 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-25T00:06:58Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.035074 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.035204 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.035256 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.035270 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.035279 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:59Z","lastTransitionTime":"2026-01-25T00:06:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.137988 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.138038 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.138050 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.138067 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.138081 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:59Z","lastTransitionTime":"2026-01-25T00:06:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.240185 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.240231 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.240243 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.240259 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.240269 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:59Z","lastTransitionTime":"2026-01-25T00:06:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.274605 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.274656 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.274606 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:06:59 crc kubenswrapper[4985]: E0125 00:06:59.274743 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:06:59 crc kubenswrapper[4985]: E0125 00:06:59.274857 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:06:59 crc kubenswrapper[4985]: E0125 00:06:59.274943 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.309873 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 22:08:36.89088368 +0000 UTC Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.343658 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.343717 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.343739 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.343768 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.343790 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:59Z","lastTransitionTime":"2026-01-25T00:06:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.446608 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.446661 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.446677 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.446695 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.446709 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:59Z","lastTransitionTime":"2026-01-25T00:06:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.541042 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-cc28q_64cc3123-ba76-4365-86ae-c4cf7c09a805/ovnkube-controller/0.log" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.544232 4985 generic.go:334] "Generic (PLEG): container finished" podID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerID="9c03d188441c67f8e3a703e6bf02dc9cda6f2bfafb9a519a4eacd40624b3834b" exitCode=1 Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.544289 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerDied","Data":"9c03d188441c67f8e3a703e6bf02dc9cda6f2bfafb9a519a4eacd40624b3834b"} Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.545344 4985 scope.go:117] "RemoveContainer" containerID="9c03d188441c67f8e3a703e6bf02dc9cda6f2bfafb9a519a4eacd40624b3834b" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.548649 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.548686 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.548703 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.548725 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.548743 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:59Z","lastTransitionTime":"2026-01-25T00:06:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.570695 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:59Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.587325 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:59Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.610279 4985 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:59Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.631706 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:59Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.652236 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:59Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.654047 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.654101 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.654148 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.654228 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.654246 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:59Z","lastTransitionTime":"2026-01-25T00:06:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.674755 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:
06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:59Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.699880 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:59Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.715785 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:59Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.730758 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:59Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.743618 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:59Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.756802 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.756820 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.756828 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.756844 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.756852 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:59Z","lastTransitionTime":"2026-01-25T00:06:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.774875 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c03d188441c67f8e3a703e6bf02dc9cda6f2bfafb9a519a4eacd40624b3834b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c03d188441c67f8e3a703e6bf02dc9cda6f2bfafb9a519a4eacd40624b3834b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:06:59Z\\\",\\\"message\\\":\\\"2 6297 handler.go:208] Removed *v1.Node event handler 2\\\\nI0125 00:06:59.097478 6297 handler.go:208] Removed *v1.Node event handler 7\\\\nI0125 00:06:59.097501 6297 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0125 00:06:59.097546 6297 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0125 00:06:59.097585 6297 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0125 00:06:59.098077 6297 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0125 00:06:59.098089 6297 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0125 00:06:59.098119 6297 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0125 00:06:59.098132 6297 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0125 00:06:59.098153 6297 factory.go:656] Stopping watch factory\\\\nI0125 00:06:59.098166 6297 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0125 00:06:59.098174 6297 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0125 00:06:59.098198 6297 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0125 00:06:59.098231 6297 handler.go:208] Removed *v1.Pod 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:59Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.791165 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:59Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.804605 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb
2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:59Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.824410 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:06:59Z is after 2025-08-24T17:21:41Z" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.859221 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.859264 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.859275 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.859295 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.859309 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:59Z","lastTransitionTime":"2026-01-25T00:06:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.961273 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.961317 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.961327 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.961346 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:06:59 crc kubenswrapper[4985]: I0125 00:06:59.961358 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:06:59Z","lastTransitionTime":"2026-01-25T00:06:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.063426 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.063468 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.063480 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.063496 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.063508 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:00Z","lastTransitionTime":"2026-01-25T00:07:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.165217 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.165266 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.165279 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.165295 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.165307 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:00Z","lastTransitionTime":"2026-01-25T00:07:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.268046 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.268080 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.268089 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.268121 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.268132 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:00Z","lastTransitionTime":"2026-01-25T00:07:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.310067 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 17:44:01.95796538 +0000 UTC Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.310236 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9c03d188441c67f8e3a703e6bf02dc9cda6f2bfa
fb9a519a4eacd40624b3834b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c03d188441c67f8e3a703e6bf02dc9cda6f2bfafb9a519a4eacd40624b3834b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:06:59Z\\\",\\\"message\\\":\\\"2 6297 handler.go:208] Removed *v1.Node event handler 2\\\\nI0125 00:06:59.097478 6297 handler.go:208] Removed *v1.Node event handler 7\\\\nI0125 00:06:59.097501 6297 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0125 00:06:59.097546 6297 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0125 00:06:59.097585 6297 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0125 00:06:59.098077 6297 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0125 00:06:59.098089 6297 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0125 00:06:59.098119 6297 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0125 00:06:59.098132 6297 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0125 00:06:59.098153 6297 factory.go:656] Stopping watch factory\\\\nI0125 00:06:59.098166 6297 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0125 00:06:59.098174 6297 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0125 00:06:59.098198 6297 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0125 00:06:59.098231 6297 handler.go:208] Removed *v1.Pod 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:56Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.321797 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.340276 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb
2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.361577 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.372929 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.372972 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.372987 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.373008 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.373023 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:00Z","lastTransitionTime":"2026-01-25T00:07:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.380466 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.396433 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.411099 4985 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.425042 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.440644 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.457432 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.474930 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.474959 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:00 crc 
kubenswrapper[4985]: I0125 00:07:00.474966 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.474978 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.474856 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea
3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.474987 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:00Z","lastTransitionTime":"2026-01-25T00:07:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.487134 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.498217 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.512465 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.548927 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-cc28q_64cc3123-ba76-4365-86ae-c4cf7c09a805/ovnkube-controller/0.log" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.551945 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerStarted","Data":"fb431b10a7b872ef9ceab2f5750aabf6fc14cbbf218615530243837f5b6e50f4"} Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.552499 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.569591 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.577373 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.577421 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.577437 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.577456 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.577471 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:00Z","lastTransitionTime":"2026-01-25T00:07:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.590767 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.623988 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready 
status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-s
ocket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb431b10a7b872ef9ceab2f5750aabf6fc14cbbf218615530243837f5b6e50f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c03d188441c67f8e3a703e6bf02dc9cda6f2bfafb9a519a4eacd40624b3834b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:06:59Z\\\",\\\"message\\\":\\\"2 6297 handler.go:208] Removed *v1.Node event handler 2\\\\nI0125 00:06:59.097478 6297 handler.go:208] Removed *v1.Node event handler 7\\\\nI0125 00:06:59.097501 6297 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0125 00:06:59.097546 6297 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0125 00:06:59.097585 6297 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0125 00:06:59.098077 6297 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0125 00:06:59.098089 6297 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0125 00:06:59.098119 6297 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0125 00:06:59.098132 6297 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0125 00:06:59.098153 6297 factory.go:656] Stopping watch factory\\\\nI0125 00:06:59.098166 6297 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0125 00:06:59.098174 6297 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0125 00:06:59.098198 6297 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0125 00:06:59.098231 6297 handler.go:208] Removed *v1.Pod 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.639330 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.658656 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.674431 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.679853 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.679905 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.679923 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.679946 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.679965 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:00Z","lastTransitionTime":"2026-01-25T00:07:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.694740 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa4
1ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.707620 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8
a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.721459 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.739936 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.758712 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.773397 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.782380 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.782427 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.782444 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.782464 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.782478 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:00Z","lastTransitionTime":"2026-01-25T00:07:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.789984 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.806027 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.886074 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.886171 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.886189 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.886216 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.886233 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:00Z","lastTransitionTime":"2026-01-25T00:07:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.989745 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.989806 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.989827 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.989852 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:00 crc kubenswrapper[4985]: I0125 00:07:00.989870 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:00Z","lastTransitionTime":"2026-01-25T00:07:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.093831 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.093895 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.093914 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.093939 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.093958 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:01Z","lastTransitionTime":"2026-01-25T00:07:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.197315 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.197703 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.197896 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.198095 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.198299 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:01Z","lastTransitionTime":"2026-01-25T00:07:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.274479 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:01 crc kubenswrapper[4985]: E0125 00:07:01.274625 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.274700 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:01 crc kubenswrapper[4985]: E0125 00:07:01.274959 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.274507 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:01 crc kubenswrapper[4985]: E0125 00:07:01.275623 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.301148 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.301515 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.301612 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.301709 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.301803 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:01Z","lastTransitionTime":"2026-01-25T00:07:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.310850 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 07:33:53.65974319 +0000 UTC Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.404456 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.404521 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.404540 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.404568 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.404586 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:01Z","lastTransitionTime":"2026-01-25T00:07:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.507192 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.507544 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.507643 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.507735 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.507816 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:01Z","lastTransitionTime":"2026-01-25T00:07:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.559523 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-cc28q_64cc3123-ba76-4365-86ae-c4cf7c09a805/ovnkube-controller/1.log" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.561308 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-cc28q_64cc3123-ba76-4365-86ae-c4cf7c09a805/ovnkube-controller/0.log" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.566426 4985 generic.go:334] "Generic (PLEG): container finished" podID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerID="fb431b10a7b872ef9ceab2f5750aabf6fc14cbbf218615530243837f5b6e50f4" exitCode=1 Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.566491 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerDied","Data":"fb431b10a7b872ef9ceab2f5750aabf6fc14cbbf218615530243837f5b6e50f4"} Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.566568 4985 scope.go:117] "RemoveContainer" containerID="9c03d188441c67f8e3a703e6bf02dc9cda6f2bfafb9a519a4eacd40624b3834b" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.569174 4985 scope.go:117] "RemoveContainer" containerID="fb431b10a7b872ef9ceab2f5750aabf6fc14cbbf218615530243837f5b6e50f4" Jan 25 00:07:01 crc kubenswrapper[4985]: E0125 00:07:01.569489 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-cc28q_openshift-ovn-kubernetes(64cc3123-ba76-4365-86ae-c4cf7c09a805)\"" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.592298 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:01Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.611059 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.611160 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.611187 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.611217 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.611241 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:01Z","lastTransitionTime":"2026-01-25T00:07:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.615550 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:01Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.635215 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:01Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.673025 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb431b10a7b872ef9ceab2f5750aabf6fc14cbbf
218615530243837f5b6e50f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c03d188441c67f8e3a703e6bf02dc9cda6f2bfafb9a519a4eacd40624b3834b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:06:59Z\\\",\\\"message\\\":\\\"2 6297 handler.go:208] Removed *v1.Node event handler 2\\\\nI0125 00:06:59.097478 6297 handler.go:208] Removed *v1.Node event handler 7\\\\nI0125 00:06:59.097501 6297 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0125 00:06:59.097546 6297 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0125 00:06:59.097585 6297 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0125 00:06:59.098077 6297 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0125 00:06:59.098089 6297 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0125 00:06:59.098119 6297 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0125 00:06:59.098132 6297 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0125 00:06:59.098153 6297 factory.go:656] Stopping watch factory\\\\nI0125 00:06:59.098166 6297 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0125 00:06:59.098174 6297 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0125 00:06:59.098198 6297 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0125 00:06:59.098231 6297 handler.go:208] Removed *v1.Pod ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb431b10a7b872ef9ceab2f5750aabf6fc14cbbf218615530243837f5b6e50f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:00Z\\\",\\\"message\\\":\\\"ocal for Pod openshift-multus/multus-additional-cni-plugins-dt2mv in node crc\\\\nF0125 00:07:00.362453 6441 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z]\\\\nI0125 00:07:00.362483 6441 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-dt2mv after 0 failed attempt(s)\\\\nI0125 00:07:00.362488 6441 default_network_controller.go:776] Recording success event on pod 
openshift-multus/multus-additional-cni-plugins-dt2mv\\\\nI0125 00:07:00.362359 6441 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55g\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:01Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.690679 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:01Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.709910 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:01Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.715020 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.715082 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.715100 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.715147 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.715166 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:01Z","lastTransitionTime":"2026-01-25T00:07:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.732737 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:01Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.752879 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:01Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.771049 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:01Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.794655 4985 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:01Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.812230 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:01Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.817551 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.817606 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.817624 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.817646 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.817670 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:01Z","lastTransitionTime":"2026-01-25T00:07:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.828411 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:01Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.849533 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:01Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.873086 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-25T00:07:01Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.921322 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.921391 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.921407 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.921432 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:01 crc kubenswrapper[4985]: I0125 00:07:01.921449 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:01Z","lastTransitionTime":"2026-01-25T00:07:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.025962 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.026004 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.026014 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.026027 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.026036 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:02Z","lastTransitionTime":"2026-01-25T00:07:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.128545 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.128634 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.128663 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.128696 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.128794 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:02Z","lastTransitionTime":"2026-01-25T00:07:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.231834 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.231885 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.231937 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.231962 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.231982 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:02Z","lastTransitionTime":"2026-01-25T00:07:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.311289 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 10:21:15.344608977 +0000 UTC Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.334968 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.335026 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.335049 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.335077 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.335099 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:02Z","lastTransitionTime":"2026-01-25T00:07:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.421838 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j"] Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.423285 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.430400 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.432245 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.441480 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.441514 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.441525 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.441539 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.441550 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:02Z","lastTransitionTime":"2026-01-25T00:07:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.442102 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.462067 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.482830 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.501513 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.524552 4985 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.540495 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.544523 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.544552 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.544561 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.544574 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.544616 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:02Z","lastTransitionTime":"2026-01-25T00:07:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.555443 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.573218 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.575401 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-cc28q_64cc3123-ba76-4365-86ae-c4cf7c09a805/ovnkube-controller/1.log" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.580005 4985 scope.go:117] "RemoveContainer" containerID="fb431b10a7b872ef9ceab2f5750aabf6fc14cbbf218615530243837f5b6e50f4" Jan 25 00:07:02 crc kubenswrapper[4985]: E0125 00:07:02.580226 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-cc28q_openshift-ovn-kubernetes(64cc3123-ba76-4365-86ae-c4cf7c09a805)\"" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.596051 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Dis
abled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\
\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.617032 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\
\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.624326 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a3ed24b0-a81a-4bc5-9218-446a83a8f78d-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-l284j\" (UID: \"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.624597 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a3ed24b0-a81a-4bc5-9218-446a83a8f78d-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-l284j\" (UID: \"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.624638 4985 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a3ed24b0-a81a-4bc5-9218-446a83a8f78d-env-overrides\") pod \"ovnkube-control-plane-749d76644c-l284j\" (UID: \"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.624705 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhfbb\" (UniqueName: \"kubernetes.io/projected/a3ed24b0-a81a-4bc5-9218-446a83a8f78d-kube-api-access-vhfbb\") pod \"ovnkube-control-plane-749d76644c-l284j\" (UID: \"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.626704 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.631944 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.646209 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.648024 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.648073 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.648094 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.648143 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.648164 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:02Z","lastTransitionTime":"2026-01-25T00:07:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.677214 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb431b10a7b872ef9ceab2f5750aabf6fc14cbbf218615530243837f5b6e50f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c03d188441c67f8e3a703e6bf02dc9cda6f2bfafb9a519a4eacd40624b3834b\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:06:59Z\\\",\\\"message\\\":\\\"2 6297 handler.go:208] Removed *v1.Node event handler 2\\\\nI0125 00:06:59.097478 6297 handler.go:208] Removed *v1.Node event handler 7\\\\nI0125 00:06:59.097501 6297 reflector.go:311] Stopping reflector *v1.Pod (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0125 00:06:59.097546 6297 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0125 00:06:59.097585 6297 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0125 00:06:59.098077 6297 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI0125 00:06:59.098089 6297 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI0125 00:06:59.098119 6297 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0125 00:06:59.098132 6297 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0125 00:06:59.098153 6297 factory.go:656] Stopping watch factory\\\\nI0125 00:06:59.098166 6297 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0125 00:06:59.098174 6297 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0125 00:06:59.098198 6297 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0125 00:06:59.098231 6297 handler.go:208] Removed *v1.Pod 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:56Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb431b10a7b872ef9ceab2f5750aabf6fc14cbbf218615530243837f5b6e50f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:00Z\\\",\\\"message\\\":\\\"ocal for Pod openshift-multus/multus-additional-cni-plugins-dt2mv in node crc\\\\nF0125 00:07:00.362453 6441 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z]\\\\nI0125 00:07:00.362483 6441 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-dt2mv after 0 failed attempt(s)\\\\nI0125 00:07:00.362488 6441 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-dt2mv\\\\nI0125 00:07:00.362359 6441 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55g\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:59Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswi
tch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.690624 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.701206 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l284j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.717015 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.725384 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a3ed24b0-a81a-4bc5-9218-446a83a8f78d-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-l284j\" (UID: \"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.725459 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a3ed24b0-a81a-4bc5-9218-446a83a8f78d-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-l284j\" (UID: \"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.725491 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a3ed24b0-a81a-4bc5-9218-446a83a8f78d-env-overrides\") pod \"ovnkube-control-plane-749d76644c-l284j\" (UID: \"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.725586 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhfbb\" (UniqueName: \"kubernetes.io/projected/a3ed24b0-a81a-4bc5-9218-446a83a8f78d-kube-api-access-vhfbb\") pod \"ovnkube-control-plane-749d76644c-l284j\" (UID: \"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.726395 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a3ed24b0-a81a-4bc5-9218-446a83a8f78d-env-overrides\") pod \"ovnkube-control-plane-749d76644c-l284j\" (UID: \"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.726660 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a3ed24b0-a81a-4bc5-9218-446a83a8f78d-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-l284j\" (UID: \"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.738508 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.738649 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a3ed24b0-a81a-4bc5-9218-446a83a8f78d-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-l284j\" (UID: \"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.751593 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.751631 4985 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.751644 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.751661 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.751677 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:02Z","lastTransitionTime":"2026-01-25T00:07:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.754741 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-
config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.761054 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhfbb\" (UniqueName: \"kubernetes.io/projected/a3ed24b0-a81a-4bc5-9218-446a83a8f78d-kube-api-access-vhfbb\") pod \"ovnkube-control-plane-749d76644c-l284j\" (UID: \"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.773347 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a
8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.785827 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e635
5e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.800275 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.813715 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.830483 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.847144 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.853841 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.853870 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.853880 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.853896 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.853907 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:02Z","lastTransitionTime":"2026-01-25T00:07:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.861497 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b33
5e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.874389 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.890303 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.909742 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb431b10a7b872ef9ceab2f5750aabf6fc14cbbf
218615530243837f5b6e50f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb431b10a7b872ef9ceab2f5750aabf6fc14cbbf218615530243837f5b6e50f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:00Z\\\",\\\"message\\\":\\\"ocal for Pod openshift-multus/multus-additional-cni-plugins-dt2mv in node crc\\\\nF0125 00:07:00.362453 6441 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z]\\\\nI0125 00:07:00.362483 6441 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-dt2mv after 0 failed attempt(s)\\\\nI0125 00:07:00.362488 6441 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-dt2mv\\\\nI0125 00:07:00.362359 6441 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55g\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-cc28q_openshift-ovn-kubernetes(64cc3123-ba76-4365-86ae-c4cf7c09a805)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.922886 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.939177 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l284j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.956144 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.956199 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.956211 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.956228 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:02 crc kubenswrapper[4985]: I0125 00:07:02.956239 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:02Z","lastTransitionTime":"2026-01-25T00:07:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.049154 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.058020 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.058086 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.058136 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.058167 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.058189 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:03Z","lastTransitionTime":"2026-01-25T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:03 crc kubenswrapper[4985]: W0125 00:07:03.071344 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda3ed24b0_a81a_4bc5_9218_446a83a8f78d.slice/crio-43d0a4fc8c46676c442637f1ff3fed9ed53447714647c1b603b3105381f5cb42 WatchSource:0}: Error finding container 43d0a4fc8c46676c442637f1ff3fed9ed53447714647c1b603b3105381f5cb42: Status 404 returned error can't find the container with id 43d0a4fc8c46676c442637f1ff3fed9ed53447714647c1b603b3105381f5cb42 Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.161772 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.161819 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.161832 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.161850 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.161866 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:03Z","lastTransitionTime":"2026-01-25T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.265177 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.265241 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.265259 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.265284 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.265302 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:03Z","lastTransitionTime":"2026-01-25T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.274584 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.274672 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.274690 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:03 crc kubenswrapper[4985]: E0125 00:07:03.274827 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:03 crc kubenswrapper[4985]: E0125 00:07:03.274926 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:03 crc kubenswrapper[4985]: E0125 00:07:03.275048 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.299362 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.299413 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.299429 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.299452 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.299470 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:03Z","lastTransitionTime":"2026-01-25T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.312173 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 04:58:08.42537387 +0000 UTC Jan 25 00:07:03 crc kubenswrapper[4985]: E0125 00:07:03.318653 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:03Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.323223 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.323260 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.323272 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.323291 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.323304 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:03Z","lastTransitionTime":"2026-01-25T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:03 crc kubenswrapper[4985]: E0125 00:07:03.341175 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:03Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.345023 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.345071 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.345088 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.345132 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.345148 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:03Z","lastTransitionTime":"2026-01-25T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:03 crc kubenswrapper[4985]: E0125 00:07:03.363358 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:03Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.366977 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.367030 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.367051 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.367075 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.367092 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:03Z","lastTransitionTime":"2026-01-25T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:03 crc kubenswrapper[4985]: E0125 00:07:03.385841 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:03Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.390675 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.390737 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.390754 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.390782 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.390801 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:03Z","lastTransitionTime":"2026-01-25T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:03 crc kubenswrapper[4985]: E0125 00:07:03.412386 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:03Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:03 crc kubenswrapper[4985]: E0125 00:07:03.412526 4985 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.414247 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.414288 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.414304 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.414326 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.414344 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:03Z","lastTransitionTime":"2026-01-25T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.517291 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.517333 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.517349 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.517370 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.517388 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:03Z","lastTransitionTime":"2026-01-25T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.583162 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" event={"ID":"a3ed24b0-a81a-4bc5-9218-446a83a8f78d","Type":"ContainerStarted","Data":"769742294a1ffd4551af4b1045e310ed6ab47e9254c01b9b251d6cafe0b48a64"} Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.583207 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" event={"ID":"a3ed24b0-a81a-4bc5-9218-446a83a8f78d","Type":"ContainerStarted","Data":"591fa8dd6d7ac6ffa3214a1a6a4539d230445f1ffbf1989f0fb77be31eacde7d"} Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.583220 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" event={"ID":"a3ed24b0-a81a-4bc5-9218-446a83a8f78d","Type":"ContainerStarted","Data":"43d0a4fc8c46676c442637f1ff3fed9ed53447714647c1b603b3105381f5cb42"} Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.602666 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for 
client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:03Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.619688 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.619762 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 
00:07:03.619784 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.619813 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.619835 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:03Z","lastTransitionTime":"2026-01-25T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.621827 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:03Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.632881 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:03Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.654289 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:03Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.676397 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-25T00:07:03Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.696504 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:03Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.708450 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:03Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.720311 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:03Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.721495 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.721543 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.721562 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.721591 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.721613 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:03Z","lastTransitionTime":"2026-01-25T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.736676 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb431b10a7b872ef9ceab2f5750aabf6fc14cbbf218615530243837f5b6e50f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb431b10a7b872ef9ceab2f5750aabf6fc14cbbf218615530243837f5b6e50f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:00Z\\\",\\\"message\\\":\\\"ocal for Pod openshift-multus/multus-additional-cni-plugins-dt2mv in node crc\\\\nF0125 00:07:00.362453 6441 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z]\\\\nI0125 00:07:00.362483 6441 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-dt2mv after 0 failed attempt(s)\\\\nI0125 00:07:00.362488 6441 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-dt2mv\\\\nI0125 00:07:00.362359 6441 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55g\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-cc28q_openshift-ovn-kubernetes(64cc3123-ba76-4365-86ae-c4cf7c09a805)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:03Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.746816 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:03Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.759587 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fa8dd6d7ac6ffa3214a1a6a4539d230445f1ffbf1989f0fb77be31eacde7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769742294a1ffd4551af4b1045e310ed6ab47e9254c01b9b251d6cafe0b48a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l284j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:03Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.772023 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:03Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.783212 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:03Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.796825 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:03Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.806831 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:03Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.824766 4985 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.824833 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.824857 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.824888 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.824913 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:03Z","lastTransitionTime":"2026-01-25T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.928495 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.928565 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.928583 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.928606 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:03 crc kubenswrapper[4985]: I0125 00:07:03.928627 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:03Z","lastTransitionTime":"2026-01-25T00:07:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.031708 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.031765 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.031784 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.031809 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.031828 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:04Z","lastTransitionTime":"2026-01-25T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.136455 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.136912 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.136932 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.136958 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.136977 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:04Z","lastTransitionTime":"2026-01-25T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.240531 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.240583 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.240603 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.240625 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.240642 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:04Z","lastTransitionTime":"2026-01-25T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.297819 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-cqtvp"] Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.298547 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:04 crc kubenswrapper[4985]: E0125 00:07:04.298644 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.313238 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 19:43:13.837890497 +0000 UTC Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.318895 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fa8dd6d7ac6ffa3214a1a6a4539d230445f1ffbf1989f0fb77be31eacde7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769742294a1ffd4551af4b1045e310ed6ab47e9254c01b9b251d6cafe0b48a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\
\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l284j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:04Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.338016 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:04Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.342304 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs\") pod \"network-metrics-daemon-cqtvp\" (UID: \"39723ce0-614f-4ada-9cc7-6efe79c7e51c\") " pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.342368 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xc2dv\" (UniqueName: \"kubernetes.io/projected/39723ce0-614f-4ada-9cc7-6efe79c7e51c-kube-api-access-xc2dv\") pod \"network-metrics-daemon-cqtvp\" (UID: \"39723ce0-614f-4ada-9cc7-6efe79c7e51c\") " pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.343307 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.343351 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.343367 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.343389 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.343406 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:04Z","lastTransitionTime":"2026-01-25T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.359079 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:04Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.378411 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:04Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.395006 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:04Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.416871 4985 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb522691302
8822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:04Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.434832 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:04Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.443631 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs\") pod \"network-metrics-daemon-cqtvp\" (UID: \"39723ce0-614f-4ada-9cc7-6efe79c7e51c\") " pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.443710 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xc2dv\" (UniqueName: \"kubernetes.io/projected/39723ce0-614f-4ada-9cc7-6efe79c7e51c-kube-api-access-xc2dv\") pod \"network-metrics-daemon-cqtvp\" (UID: \"39723ce0-614f-4ada-9cc7-6efe79c7e51c\") " pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:04 crc kubenswrapper[4985]: E0125 00:07:04.443881 4985 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 25 00:07:04 crc kubenswrapper[4985]: E0125 00:07:04.443997 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs podName:39723ce0-614f-4ada-9cc7-6efe79c7e51c nodeName:}" failed. No retries permitted until 2026-01-25 00:07:04.943969803 +0000 UTC m=+34.975906116 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs") pod "network-metrics-daemon-cqtvp" (UID: "39723ce0-614f-4ada-9cc7-6efe79c7e51c") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.445914 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.445965 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.445981 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.446005 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.446022 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:04Z","lastTransitionTime":"2026-01-25T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.451547 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\
\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:04Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.472099 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:04Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.474362 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xc2dv\" (UniqueName: \"kubernetes.io/projected/39723ce0-614f-4ada-9cc7-6efe79c7e51c-kube-api-access-xc2dv\") pod \"network-metrics-daemon-cqtvp\" (UID: \"39723ce0-614f-4ada-9cc7-6efe79c7e51c\") " pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.500977 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":
\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\
":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"re
startCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:04Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.522563 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqtvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39723ce0-614f-4ada-9cc7-6efe79c7e51c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqtvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:04Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.543165 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:04Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.548945 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.549052 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.549084 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.549158 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.549186 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:04Z","lastTransitionTime":"2026-01-25T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.564433 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:04Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.582596 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:04Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.610834 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb431b10a7b872ef9ceab2f5750aabf6fc14cbbf
218615530243837f5b6e50f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb431b10a7b872ef9ceab2f5750aabf6fc14cbbf218615530243837f5b6e50f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:00Z\\\",\\\"message\\\":\\\"ocal for Pod openshift-multus/multus-additional-cni-plugins-dt2mv in node crc\\\\nF0125 00:07:00.362453 6441 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z]\\\\nI0125 00:07:00.362483 6441 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-dt2mv after 0 failed attempt(s)\\\\nI0125 00:07:00.362488 6441 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-dt2mv\\\\nI0125 00:07:00.362359 6441 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55g\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-cc28q_openshift-ovn-kubernetes(64cc3123-ba76-4365-86ae-c4cf7c09a805)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:04Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.626588 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:04Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.651395 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.651449 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.651466 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.651490 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.651509 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:04Z","lastTransitionTime":"2026-01-25T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.755148 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.755234 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.755261 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.755294 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.755319 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:04Z","lastTransitionTime":"2026-01-25T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.858819 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.858881 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.858897 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.858919 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.858936 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:04Z","lastTransitionTime":"2026-01-25T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.947774 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs\") pod \"network-metrics-daemon-cqtvp\" (UID: \"39723ce0-614f-4ada-9cc7-6efe79c7e51c\") " pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:04 crc kubenswrapper[4985]: E0125 00:07:04.947960 4985 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 25 00:07:04 crc kubenswrapper[4985]: E0125 00:07:04.948076 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs podName:39723ce0-614f-4ada-9cc7-6efe79c7e51c nodeName:}" failed. No retries permitted until 2026-01-25 00:07:05.948049244 +0000 UTC m=+35.979985557 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs") pod "network-metrics-daemon-cqtvp" (UID: "39723ce0-614f-4ada-9cc7-6efe79c7e51c") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.962146 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.962210 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.962236 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.962268 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:04 crc kubenswrapper[4985]: I0125 00:07:04.962291 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:04Z","lastTransitionTime":"2026-01-25T00:07:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.065561 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.065614 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.065631 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.065653 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.065670 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:05Z","lastTransitionTime":"2026-01-25T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.169232 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.169301 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.169319 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.169341 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.169358 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:05Z","lastTransitionTime":"2026-01-25T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.249507 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.249648 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.249693 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.249742 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:05 crc kubenswrapper[4985]: E0125 00:07:05.249839 4985 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 25 00:07:05 crc kubenswrapper[4985]: E0125 00:07:05.249860 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 25 00:07:05 crc kubenswrapper[4985]: E0125 00:07:05.249899 4985 projected.go:288] Couldn't get configMap 
openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 25 00:07:05 crc kubenswrapper[4985]: E0125 00:07:05.249898 4985 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 25 00:07:05 crc kubenswrapper[4985]: E0125 00:07:05.249922 4985 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:07:05 crc kubenswrapper[4985]: E0125 00:07:05.249956 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-25 00:07:21.249925088 +0000 UTC m=+51.281861401 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 25 00:07:05 crc kubenswrapper[4985]: E0125 00:07:05.249996 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-25 00:07:21.249978599 +0000 UTC m=+51.281914902 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:07:05 crc kubenswrapper[4985]: E0125 00:07:05.250040 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:07:21.25001017 +0000 UTC m=+51.281946473 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.250189 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:05 crc kubenswrapper[4985]: E0125 00:07:05.250255 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-25 00:07:21.250204135 +0000 UTC m=+51.282140408 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 25 00:07:05 crc kubenswrapper[4985]: E0125 00:07:05.250401 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 25 00:07:05 crc kubenswrapper[4985]: E0125 00:07:05.250426 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 25 00:07:05 crc kubenswrapper[4985]: E0125 00:07:05.250446 4985 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:07:05 crc kubenswrapper[4985]: E0125 00:07:05.250515 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-25 00:07:21.250497122 +0000 UTC m=+51.282433425 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.271848 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.272088 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.272165 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.272264 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.272290 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:05Z","lastTransitionTime":"2026-01-25T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.275539 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.275569 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.275616 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:05 crc kubenswrapper[4985]: E0125 00:07:05.275760 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:05 crc kubenswrapper[4985]: E0125 00:07:05.275945 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:05 crc kubenswrapper[4985]: E0125 00:07:05.276207 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.313873 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 07:40:27.504991603 +0000 UTC Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.376368 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.376448 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.376475 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.376511 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.376536 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:05Z","lastTransitionTime":"2026-01-25T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.479564 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.479614 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.479633 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.479655 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.479674 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:05Z","lastTransitionTime":"2026-01-25T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.582260 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.582288 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.582296 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.582307 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.582315 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:05Z","lastTransitionTime":"2026-01-25T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.684961 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.685034 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.685053 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.685080 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.685098 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:05Z","lastTransitionTime":"2026-01-25T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.787550 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.787634 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.787653 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.787679 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.787737 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:05Z","lastTransitionTime":"2026-01-25T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.891281 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.891715 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.891893 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.892056 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.892236 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:05Z","lastTransitionTime":"2026-01-25T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.955945 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs\") pod \"network-metrics-daemon-cqtvp\" (UID: \"39723ce0-614f-4ada-9cc7-6efe79c7e51c\") " pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:05 crc kubenswrapper[4985]: E0125 00:07:05.956135 4985 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 25 00:07:05 crc kubenswrapper[4985]: E0125 00:07:05.956501 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs podName:39723ce0-614f-4ada-9cc7-6efe79c7e51c nodeName:}" failed. No retries permitted until 2026-01-25 00:07:07.956482023 +0000 UTC m=+37.988418306 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs") pod "network-metrics-daemon-cqtvp" (UID: "39723ce0-614f-4ada-9cc7-6efe79c7e51c") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.995184 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.995280 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.995298 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.995322 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:05 crc kubenswrapper[4985]: I0125 00:07:05.995340 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:05Z","lastTransitionTime":"2026-01-25T00:07:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.098532 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.098609 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.098627 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.098650 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.098669 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:06Z","lastTransitionTime":"2026-01-25T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.201724 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.201773 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.201785 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.201803 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.201816 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:06Z","lastTransitionTime":"2026-01-25T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.274132 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:06 crc kubenswrapper[4985]: E0125 00:07:06.274332 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.306559 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.306619 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.306641 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.306670 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.306688 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:06Z","lastTransitionTime":"2026-01-25T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.315171 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 01:27:34.955006659 +0000 UTC Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.409682 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.409728 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.409745 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.409767 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.409783 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:06Z","lastTransitionTime":"2026-01-25T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.512888 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.512927 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.512942 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.512965 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.512981 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:06Z","lastTransitionTime":"2026-01-25T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.615656 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.615713 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.615731 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.615754 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.615770 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:06Z","lastTransitionTime":"2026-01-25T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.718415 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.718471 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.718489 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.718512 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.718529 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:06Z","lastTransitionTime":"2026-01-25T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.821043 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.821166 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.821195 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.821228 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.821255 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:06Z","lastTransitionTime":"2026-01-25T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.924989 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.925055 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.925074 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.925097 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:06 crc kubenswrapper[4985]: I0125 00:07:06.925142 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:06Z","lastTransitionTime":"2026-01-25T00:07:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.028083 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.028183 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.028209 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.028239 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.028262 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:07Z","lastTransitionTime":"2026-01-25T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.131556 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.131907 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.132185 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.132393 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.132592 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:07Z","lastTransitionTime":"2026-01-25T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.235864 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.235921 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.235938 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.235962 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.235976 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:07Z","lastTransitionTime":"2026-01-25T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.274497 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.274563 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.274519 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:07 crc kubenswrapper[4985]: E0125 00:07:07.274710 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:07 crc kubenswrapper[4985]: E0125 00:07:07.274807 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:07 crc kubenswrapper[4985]: E0125 00:07:07.274945 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.316013 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 00:22:46.618587334 +0000 UTC Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.339718 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.339802 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.339828 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.339852 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.339870 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:07Z","lastTransitionTime":"2026-01-25T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.443087 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.443159 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.443174 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.443197 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.443214 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:07Z","lastTransitionTime":"2026-01-25T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.546175 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.546214 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.546226 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.546240 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.546250 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:07Z","lastTransitionTime":"2026-01-25T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.649297 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.649575 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.649722 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.649858 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.650008 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:07Z","lastTransitionTime":"2026-01-25T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.753048 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.753100 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.753151 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.753175 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.753193 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:07Z","lastTransitionTime":"2026-01-25T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.856941 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.857430 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.857600 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.857763 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.857908 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:07Z","lastTransitionTime":"2026-01-25T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.961020 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.961093 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.961148 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.961178 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.961199 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:07Z","lastTransitionTime":"2026-01-25T00:07:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:07 crc kubenswrapper[4985]: I0125 00:07:07.976323 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs\") pod \"network-metrics-daemon-cqtvp\" (UID: \"39723ce0-614f-4ada-9cc7-6efe79c7e51c\") " pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:07 crc kubenswrapper[4985]: E0125 00:07:07.976573 4985 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 25 00:07:07 crc kubenswrapper[4985]: E0125 00:07:07.976695 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs podName:39723ce0-614f-4ada-9cc7-6efe79c7e51c nodeName:}" failed. No retries permitted until 2026-01-25 00:07:11.976671099 +0000 UTC m=+42.008607402 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs") pod "network-metrics-daemon-cqtvp" (UID: "39723ce0-614f-4ada-9cc7-6efe79c7e51c") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.063841 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.064187 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.064359 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.064513 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.064744 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:08Z","lastTransitionTime":"2026-01-25T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.168500 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.168570 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.168590 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.168617 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.168634 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:08Z","lastTransitionTime":"2026-01-25T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.272124 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.272175 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.272195 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.272213 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.272225 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:08Z","lastTransitionTime":"2026-01-25T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.274382 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:08 crc kubenswrapper[4985]: E0125 00:07:08.274519 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.317196 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 20:38:38.62308238 +0000 UTC Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.375226 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.375282 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.375298 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.375319 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.375338 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:08Z","lastTransitionTime":"2026-01-25T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.479196 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.479278 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.479295 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.479316 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.479335 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:08Z","lastTransitionTime":"2026-01-25T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.582484 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.582531 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.582550 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.582573 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.582591 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:08Z","lastTransitionTime":"2026-01-25T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.685621 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.685698 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.685720 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.685747 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.685785 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:08Z","lastTransitionTime":"2026-01-25T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.789209 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.789275 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.789293 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.789316 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.789334 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:08Z","lastTransitionTime":"2026-01-25T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.892600 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.892754 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.892776 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.892799 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.892897 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:08Z","lastTransitionTime":"2026-01-25T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.996148 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.996205 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.996222 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.996247 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:08 crc kubenswrapper[4985]: I0125 00:07:08.996265 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:08Z","lastTransitionTime":"2026-01-25T00:07:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.098810 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.098894 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.098925 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.098957 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.098981 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:09Z","lastTransitionTime":"2026-01-25T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.202014 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.202084 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.202146 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.202172 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.202192 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:09Z","lastTransitionTime":"2026-01-25T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.274398 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.274455 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.274408 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:09 crc kubenswrapper[4985]: E0125 00:07:09.274677 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:09 crc kubenswrapper[4985]: E0125 00:07:09.274885 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:09 crc kubenswrapper[4985]: E0125 00:07:09.274999 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.306092 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.306194 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.306217 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.306246 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.306259 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:09Z","lastTransitionTime":"2026-01-25T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.318418 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 22:25:23.293138721 +0000 UTC Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.409176 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.409244 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.409268 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.409298 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.409326 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:09Z","lastTransitionTime":"2026-01-25T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.512221 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.512291 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.512311 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.512363 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.512557 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:09Z","lastTransitionTime":"2026-01-25T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.615852 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.615913 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.615932 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.615958 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.615977 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:09Z","lastTransitionTime":"2026-01-25T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.718907 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.719454 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.719583 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.719726 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.719899 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:09Z","lastTransitionTime":"2026-01-25T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.822728 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.822922 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.822942 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.822964 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.822981 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:09Z","lastTransitionTime":"2026-01-25T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.926091 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.926370 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.926465 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.926550 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:09 crc kubenswrapper[4985]: I0125 00:07:09.926640 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:09Z","lastTransitionTime":"2026-01-25T00:07:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.029712 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.029771 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.029790 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.029813 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.029829 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:10Z","lastTransitionTime":"2026-01-25T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.132412 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.132473 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.132490 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.132514 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.132531 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:10Z","lastTransitionTime":"2026-01-25T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.235864 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.235930 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.235946 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.235969 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.235987 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:10Z","lastTransitionTime":"2026-01-25T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.274713 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:10 crc kubenswrapper[4985]: E0125 00:07:10.274884 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.304225 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:10Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.319374 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 07:53:26.322210618 +0000 UTC Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.323985 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:10Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.338889 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.338954 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.338978 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.339007 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.339029 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:10Z","lastTransitionTime":"2026-01-25T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.339238 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:10Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.362465 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:10Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.389819 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-25T00:07:10Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.408699 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqtvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39723ce0-614f-4ada-9cc7-6efe79c7e51c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqtvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:10Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.428310 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:10Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.442273 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.442342 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.442368 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.442398 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.442422 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:10Z","lastTransitionTime":"2026-01-25T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.449784 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:10Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.469876 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:10Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.500750 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb431b10a7b872ef9ceab2f5750aabf6fc14cbbf
218615530243837f5b6e50f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb431b10a7b872ef9ceab2f5750aabf6fc14cbbf218615530243837f5b6e50f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:00Z\\\",\\\"message\\\":\\\"ocal for Pod openshift-multus/multus-additional-cni-plugins-dt2mv in node crc\\\\nF0125 00:07:00.362453 6441 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z]\\\\nI0125 00:07:00.362483 6441 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-dt2mv after 0 failed attempt(s)\\\\nI0125 00:07:00.362488 6441 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-dt2mv\\\\nI0125 00:07:00.362359 6441 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-operator/network-operator-58b4c7f79c-55g\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-cc28q_openshift-ovn-kubernetes(64cc3123-ba76-4365-86ae-c4cf7c09a805)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:10Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.516485 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:10Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.533986 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fa8dd6d7ac6ffa3214a1a6a4539d230445f1ffbf1989f0fb77be31eacde7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769742294a1ffd4551af4b1045e310ed6ab47e9254c01b9b251d6cafe0b48a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l284j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:10Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.545074 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.545117 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.545128 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.545145 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.545156 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:10Z","lastTransitionTime":"2026-01-25T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.555664 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:10Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.575676 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:10Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.590000 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:10Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.603839 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:10Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.648437 4985 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.648617 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.648768 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.648908 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.649032 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:10Z","lastTransitionTime":"2026-01-25T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.751641 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.751959 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.752337 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.752737 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.753074 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:10Z","lastTransitionTime":"2026-01-25T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.856729 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.856782 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.856805 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.856887 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.856957 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:10Z","lastTransitionTime":"2026-01-25T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.961176 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.961278 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.961299 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.961364 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:10 crc kubenswrapper[4985]: I0125 00:07:10.961391 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:10Z","lastTransitionTime":"2026-01-25T00:07:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.064297 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.064388 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.064418 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.064453 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.064476 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:11Z","lastTransitionTime":"2026-01-25T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.168374 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.168436 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.168452 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.168475 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.168492 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:11Z","lastTransitionTime":"2026-01-25T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.271949 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.272011 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.272030 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.272055 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.272074 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:11Z","lastTransitionTime":"2026-01-25T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.274601 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.274627 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.274946 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:11 crc kubenswrapper[4985]: E0125 00:07:11.275150 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:11 crc kubenswrapper[4985]: E0125 00:07:11.275266 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:11 crc kubenswrapper[4985]: E0125 00:07:11.275475 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.320544 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 09:42:23.620926919 +0000 UTC Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.374727 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.374776 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.374814 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.374834 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.374844 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:11Z","lastTransitionTime":"2026-01-25T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.477978 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.478058 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.478083 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.478156 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.478184 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:11Z","lastTransitionTime":"2026-01-25T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.580910 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.580954 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.580970 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.580991 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.581010 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:11Z","lastTransitionTime":"2026-01-25T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.684016 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.684058 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.684069 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.684085 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.684096 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:11Z","lastTransitionTime":"2026-01-25T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.787569 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.787633 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.787651 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.787678 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.787703 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:11Z","lastTransitionTime":"2026-01-25T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.890519 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.890580 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.890595 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.890613 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.890624 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:11Z","lastTransitionTime":"2026-01-25T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.993269 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.993321 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.993341 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.993358 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:11 crc kubenswrapper[4985]: I0125 00:07:11.993370 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:11Z","lastTransitionTime":"2026-01-25T00:07:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.022427 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs\") pod \"network-metrics-daemon-cqtvp\" (UID: \"39723ce0-614f-4ada-9cc7-6efe79c7e51c\") " pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:12 crc kubenswrapper[4985]: E0125 00:07:12.022678 4985 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 25 00:07:12 crc kubenswrapper[4985]: E0125 00:07:12.022808 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs podName:39723ce0-614f-4ada-9cc7-6efe79c7e51c nodeName:}" failed. No retries permitted until 2026-01-25 00:07:20.022780842 +0000 UTC m=+50.054717145 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs") pod "network-metrics-daemon-cqtvp" (UID: "39723ce0-614f-4ada-9cc7-6efe79c7e51c") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.096198 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.096238 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.096249 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.096265 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.096276 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:12Z","lastTransitionTime":"2026-01-25T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.199186 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.199216 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.199225 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.199239 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.199247 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:12Z","lastTransitionTime":"2026-01-25T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.274787 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:12 crc kubenswrapper[4985]: E0125 00:07:12.274907 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.302186 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.302252 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.302276 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.302307 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.302329 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:12Z","lastTransitionTime":"2026-01-25T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.320758 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 02:04:38.415931141 +0000 UTC Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.405157 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.405203 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.405215 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.405237 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.405249 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:12Z","lastTransitionTime":"2026-01-25T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.508660 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.508734 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.508756 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.508783 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.508805 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:12Z","lastTransitionTime":"2026-01-25T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.611266 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.611318 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.611333 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.611348 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.611360 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:12Z","lastTransitionTime":"2026-01-25T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.714190 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.714224 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.714233 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.714245 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.714254 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:12Z","lastTransitionTime":"2026-01-25T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.817192 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.817258 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.817276 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.817304 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.817324 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:12Z","lastTransitionTime":"2026-01-25T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.924045 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.924140 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.924160 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.924186 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:12 crc kubenswrapper[4985]: I0125 00:07:12.924204 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:12Z","lastTransitionTime":"2026-01-25T00:07:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.027372 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.027425 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.027442 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.027464 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.027482 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:13Z","lastTransitionTime":"2026-01-25T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.131713 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.131774 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.131793 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.131816 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.131833 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:13Z","lastTransitionTime":"2026-01-25T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.234637 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.234707 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.234727 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.234749 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.234767 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:13Z","lastTransitionTime":"2026-01-25T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.274459 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.274614 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.274466 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:13 crc kubenswrapper[4985]: E0125 00:07:13.274624 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:13 crc kubenswrapper[4985]: E0125 00:07:13.274847 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:13 crc kubenswrapper[4985]: E0125 00:07:13.275084 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.321403 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 03:10:11.53068513 +0000 UTC Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.337385 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.337446 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.337463 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.337487 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.337504 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:13Z","lastTransitionTime":"2026-01-25T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.440751 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.440801 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.440813 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.440828 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.440841 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:13Z","lastTransitionTime":"2026-01-25T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.541249 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.541337 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.541364 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.541394 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.541418 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:13Z","lastTransitionTime":"2026-01-25T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:13 crc kubenswrapper[4985]: E0125 00:07:13.564092 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:13Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.570289 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.570353 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.570375 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.570401 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.570419 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:13Z","lastTransitionTime":"2026-01-25T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:13 crc kubenswrapper[4985]: E0125 00:07:13.592608 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:13Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.598795 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.598876 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.598901 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.598928 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.598946 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:13Z","lastTransitionTime":"2026-01-25T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:13 crc kubenswrapper[4985]: E0125 00:07:13.621805 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:13Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.627812 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.627883 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.627948 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.627977 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.627995 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:13Z","lastTransitionTime":"2026-01-25T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:13 crc kubenswrapper[4985]: E0125 00:07:13.649048 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:13Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.654691 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.654749 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.654769 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.654794 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.654813 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:13Z","lastTransitionTime":"2026-01-25T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:13 crc kubenswrapper[4985]: E0125 00:07:13.677586 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:13Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:13Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:13 crc kubenswrapper[4985]: E0125 00:07:13.677938 4985 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.682791 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.682857 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.682879 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.682909 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.682936 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:13Z","lastTransitionTime":"2026-01-25T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.786067 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.786619 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.787381 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.787428 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.787442 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:13Z","lastTransitionTime":"2026-01-25T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.891062 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.891168 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.891192 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.891222 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.891243 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:13Z","lastTransitionTime":"2026-01-25T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.994189 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.994270 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.994291 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.994317 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:13 crc kubenswrapper[4985]: I0125 00:07:13.994336 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:13Z","lastTransitionTime":"2026-01-25T00:07:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.097770 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.097837 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.097859 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.097884 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.097903 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:14Z","lastTransitionTime":"2026-01-25T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.200602 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.200970 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.201101 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.201274 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.201506 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:14Z","lastTransitionTime":"2026-01-25T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.274724 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:14 crc kubenswrapper[4985]: E0125 00:07:14.274940 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.304726 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.304783 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.304800 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.304825 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.304843 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:14Z","lastTransitionTime":"2026-01-25T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.322288 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 17:55:29.116219448 +0000 UTC Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.408471 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.408544 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.408568 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.408599 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.408621 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:14Z","lastTransitionTime":"2026-01-25T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.512087 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.512271 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.512291 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.512313 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.512329 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:14Z","lastTransitionTime":"2026-01-25T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.647555 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.647595 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.647606 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.647621 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.647634 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:14Z","lastTransitionTime":"2026-01-25T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.750211 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.750256 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.750266 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.750283 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.750294 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:14Z","lastTransitionTime":"2026-01-25T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.852916 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.852971 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.852983 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.853002 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.853043 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:14Z","lastTransitionTime":"2026-01-25T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.955450 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.955504 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.955521 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.955545 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:14 crc kubenswrapper[4985]: I0125 00:07:14.955561 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:14Z","lastTransitionTime":"2026-01-25T00:07:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.058216 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.058263 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.058280 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.058303 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.058334 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:15Z","lastTransitionTime":"2026-01-25T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.161820 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.161871 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.161887 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.161912 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.161929 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:15Z","lastTransitionTime":"2026-01-25T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.264888 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.265000 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.265020 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.265046 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.265066 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:15Z","lastTransitionTime":"2026-01-25T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.275305 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:15 crc kubenswrapper[4985]: E0125 00:07:15.275457 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.275540 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:15 crc kubenswrapper[4985]: E0125 00:07:15.275622 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.275640 4985 scope.go:117] "RemoveContainer" containerID="fb431b10a7b872ef9ceab2f5750aabf6fc14cbbf218615530243837f5b6e50f4" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.276195 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:15 crc kubenswrapper[4985]: E0125 00:07:15.276482 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.323248 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 14:55:29.797802039 +0000 UTC Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.368849 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.368918 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.368941 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.368970 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.368995 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:15Z","lastTransitionTime":"2026-01-25T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.472858 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.472923 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.472941 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.472965 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.472982 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:15Z","lastTransitionTime":"2026-01-25T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.576518 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.576560 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.576572 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.576592 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.576604 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:15Z","lastTransitionTime":"2026-01-25T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.656559 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-cc28q_64cc3123-ba76-4365-86ae-c4cf7c09a805/ovnkube-controller/1.log" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.662226 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerStarted","Data":"d159d4ffde7e84106d538217de84c5ab52f1ca8d4e480e874b535d7f0c5ef3d4"} Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.662919 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.683534 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.683596 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.683628 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.683672 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.683696 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:15Z","lastTransitionTime":"2026-01-25T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.684230 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.704359 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.720571 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.736419 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.762267 4985 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb522691302
8822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.786887 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.786954 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.786972 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.786996 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.787014 4985 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:15Z","lastTransitionTime":"2026-01-25T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.788808 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.807662 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.835865 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.859525 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-25T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.887606 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqtvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39723ce0-614f-4ada-9cc7-6efe79c7e51c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqtvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.889400 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:15 crc 
kubenswrapper[4985]: I0125 00:07:15.889462 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.889475 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.889513 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.889528 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:15Z","lastTransitionTime":"2026-01-25T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.907211 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.924570 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.943158 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.968026 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d159d4ffde7e84106d538217de84c5ab52f1ca8d
4e480e874b535d7f0c5ef3d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb431b10a7b872ef9ceab2f5750aabf6fc14cbbf218615530243837f5b6e50f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:00Z\\\",\\\"message\\\":\\\"ocal for Pod openshift-multus/multus-additional-cni-plugins-dt2mv in node crc\\\\nF0125 00:07:00.362453 6441 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z]\\\\nI0125 00:07:00.362483 6441 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-dt2mv after 0 failed attempt(s)\\\\nI0125 00:07:00.362488 6441 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-dt2mv\\\\nI0125 00:07:00.362359 6441 obj_retry.go:365] Adding new object: *v1.Pod 
openshift-network-operator/network-operator-58b4c7f79c-55g\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.980799 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.992550 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.992728 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.992823 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.992914 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.993027 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:15Z","lastTransitionTime":"2026-01-25T00:07:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:15 crc kubenswrapper[4985]: I0125 00:07:15.996933 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fa8dd6d7ac6ffa3214a1a6a4539d230445f1ffbf1989f0fb77be31eacde7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769742294a1ffd4551af4b1045e310ed6ab47e9254c01b9b251d6cafe0b48a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:02Z\\\"}}\" 
for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l284j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:15Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.099376 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.099437 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.099455 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.099481 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.099499 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:16Z","lastTransitionTime":"2026-01-25T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.202274 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.202341 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.202357 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.202378 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.202396 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:16Z","lastTransitionTime":"2026-01-25T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.274788 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:16 crc kubenswrapper[4985]: E0125 00:07:16.275068 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.305300 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.305359 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.305377 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.305403 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.305421 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:16Z","lastTransitionTime":"2026-01-25T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.323730 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 16:50:48.198329279 +0000 UTC Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.409500 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.409571 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.409589 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.409614 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.409631 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:16Z","lastTransitionTime":"2026-01-25T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.512832 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.512895 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.512909 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.512925 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.512936 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:16Z","lastTransitionTime":"2026-01-25T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.615952 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.616012 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.616035 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.616065 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.616087 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:16Z","lastTransitionTime":"2026-01-25T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.668981 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-cc28q_64cc3123-ba76-4365-86ae-c4cf7c09a805/ovnkube-controller/2.log" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.670551 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-cc28q_64cc3123-ba76-4365-86ae-c4cf7c09a805/ovnkube-controller/1.log" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.674858 4985 generic.go:334] "Generic (PLEG): container finished" podID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerID="d159d4ffde7e84106d538217de84c5ab52f1ca8d4e480e874b535d7f0c5ef3d4" exitCode=1 Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.674913 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerDied","Data":"d159d4ffde7e84106d538217de84c5ab52f1ca8d4e480e874b535d7f0c5ef3d4"} Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.674966 4985 scope.go:117] "RemoveContainer" containerID="fb431b10a7b872ef9ceab2f5750aabf6fc14cbbf218615530243837f5b6e50f4" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.677487 4985 scope.go:117] "RemoveContainer" containerID="d159d4ffde7e84106d538217de84c5ab52f1ca8d4e480e874b535d7f0c5ef3d4" Jan 25 00:07:16 crc kubenswrapper[4985]: E0125 00:07:16.678184 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-cc28q_openshift-ovn-kubernetes(64cc3123-ba76-4365-86ae-c4cf7c09a805)\"" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.702666 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.722047 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.722091 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.722132 4985 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.722155 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.722172 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:16Z","lastTransitionTime":"2026-01-25T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.722168 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\
\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.742679 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.764446 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.788577 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.814225 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-25T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.825659 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.825718 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.825739 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.825766 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.825789 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:16Z","lastTransitionTime":"2026-01-25T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.831604 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqtvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39723ce0-614f-4ada-9cc7-6efe79c7e51c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqtvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.853050 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.871486 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.887575 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.906643 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.929372 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.929485 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.929503 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.929563 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.929583 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:16Z","lastTransitionTime":"2026-01-25T00:07:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.938567 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d159d4ffde7e84106d538217de84c5ab52f1ca8d4e480e874b535d7f0c5ef3d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fb431b10a7b872ef9ceab2f5750aabf6fc14cbbf218615530243837f5b6e50f4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:00Z\\\",\\\"message\\\":\\\"ocal for Pod openshift-multus/multus-additional-cni-plugins-dt2mv in node crc\\\\nF0125 00:07:00.362453 6441 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:00Z is after 2025-08-24T17:21:41Z]\\\\nI0125 00:07:00.362483 6441 obj_retry.go:386] Retry successful for *v1.Pod openshift-multus/multus-additional-cni-plugins-dt2mv after 0 failed attempt(s)\\\\nI0125 00:07:00.362488 6441 default_network_controller.go:776] Recording success event on pod openshift-multus/multus-additional-cni-plugins-dt2mv\\\\nI0125 00:07:00.362359 6441 obj_retry.go:365] Adding new object: *v1.Pod 
openshift-network-operator/network-operator-58b4c7f79c-55g\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:59Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d159d4ffde7e84106d538217de84c5ab52f1ca8d4e480e874b535d7f0c5ef3d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:16Z\\\",\\\"message\\\":\\\"operator/iptables-alerter-4ln5h\\\\nI0125 00:07:16.475093 6651 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0125 00:07:16.475049 6651 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-cc28q\\\\nI0125 00:07:16.475031 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0125 00:07:16.475146 6651 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0125 00:07:16.475018 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j after 0 failed attempt(s)\\\\nI0125 00:07:16.475161 6651 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j\\\\nI0125 00:07:16.475146 6651 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-cc28q in node crc\\\\nI0125 00:07:16.475179 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-cc28q after 0 failed attempt(s)\\\\nI0125 00:07:16.475188 6651 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-cc28q\\\\nI0125 00:07:16.474974 6651 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:07:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.955738 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.
11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.977402 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b
82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:16 crc kubenswrapper[4985]: I0125 00:07:16.997369 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:16Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.015562 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fa8dd6d7ac6ffa3214a1a6a4539d230445f1ffbf1989f0fb77be31eacde7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\
\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769742294a1ffd4551af4b1045e310ed6ab47e9254c01b9b251d6cafe0b48a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l284j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.033202 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.033247 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.033264 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.033285 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.033302 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:17Z","lastTransitionTime":"2026-01-25T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.135576 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.135640 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.135657 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.135684 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.135703 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:17Z","lastTransitionTime":"2026-01-25T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.238424 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.238473 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.238485 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.238502 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.238515 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:17Z","lastTransitionTime":"2026-01-25T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.274087 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.274079 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:17 crc kubenswrapper[4985]: E0125 00:07:17.274418 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:17 crc kubenswrapper[4985]: E0125 00:07:17.274278 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.274145 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:17 crc kubenswrapper[4985]: E0125 00:07:17.274506 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.324737 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 13:18:02.783203471 +0000 UTC Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.341145 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.341211 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.341236 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.341270 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.341296 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:17Z","lastTransitionTime":"2026-01-25T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.444710 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.444770 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.444793 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.444821 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.444838 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:17Z","lastTransitionTime":"2026-01-25T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.547880 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.547955 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.547975 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.548002 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.548021 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:17Z","lastTransitionTime":"2026-01-25T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.650516 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.650565 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.650584 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.650607 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.650623 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:17Z","lastTransitionTime":"2026-01-25T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.681868 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-cc28q_64cc3123-ba76-4365-86ae-c4cf7c09a805/ovnkube-controller/2.log" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.689081 4985 scope.go:117] "RemoveContainer" containerID="d159d4ffde7e84106d538217de84c5ab52f1ca8d4e480e874b535d7f0c5ef3d4" Jan 25 00:07:17 crc kubenswrapper[4985]: E0125 00:07:17.689786 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-cc28q_openshift-ovn-kubernetes(64cc3123-ba76-4365-86ae-c4cf7c09a805)\"" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.721888 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d159d4ffde7e84106d538217de84c5ab52f1ca8d
4e480e874b535d7f0c5ef3d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d159d4ffde7e84106d538217de84c5ab52f1ca8d4e480e874b535d7f0c5ef3d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:16Z\\\",\\\"message\\\":\\\"operator/iptables-alerter-4ln5h\\\\nI0125 00:07:16.475093 6651 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0125 00:07:16.475049 6651 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-cc28q\\\\nI0125 00:07:16.475031 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0125 00:07:16.475146 6651 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0125 00:07:16.475018 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j after 0 failed attempt(s)\\\\nI0125 00:07:16.475161 6651 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j\\\\nI0125 00:07:16.475146 6651 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-cc28q in node crc\\\\nI0125 00:07:16.475179 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-cc28q after 0 failed attempt(s)\\\\nI0125 00:07:16.475188 6651 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-cc28q\\\\nI0125 00:07:16.474974 6651 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:07:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-cc28q_openshift-ovn-kubernetes(64cc3123-ba76-4365-86ae-c4cf7c09a805)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.740516 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.757287 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.757354 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.757376 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.757404 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.757427 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:17Z","lastTransitionTime":"2026-01-25T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.763360 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.783725 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.805993 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.826422 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fa8dd6d7ac6ffa3214a1a6a4539d230445f1ffbf1989f0fb77be31eacde7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769742294a1ffd4551af4b1045e310ed6ab47e9254c01b9b251d6cafe0b48a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l284j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.844089 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.860552 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.860601 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.860620 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.860643 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.860659 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:17Z","lastTransitionTime":"2026-01-25T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.865909 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.886749 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.904860 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.922200 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.935502 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqtvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39723ce0-614f-4ada-9cc7-6efe79c7e51c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqtvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.954564 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.963427 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.963463 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.963473 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.963487 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.963497 4985 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:17Z","lastTransitionTime":"2026-01-25T00:07:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.969641 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:17 crc kubenswrapper[4985]: I0125 00:07:17.983067 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.000786 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:17Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.066425 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.066486 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.066506 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.066530 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.066549 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:18Z","lastTransitionTime":"2026-01-25T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.170144 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.170203 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.170220 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.170252 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.170288 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:18Z","lastTransitionTime":"2026-01-25T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.273670 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.274099 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.273859 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.274374 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.274603 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.274631 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:18Z","lastTransitionTime":"2026-01-25T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:18 crc kubenswrapper[4985]: E0125 00:07:18.274682 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.326536 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 18:25:47.429764713 +0000 UTC Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.377718 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.378099 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.378263 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.378424 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.378577 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:18Z","lastTransitionTime":"2026-01-25T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.481507 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.481557 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.481574 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.481598 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.481614 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:18Z","lastTransitionTime":"2026-01-25T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.584437 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.584487 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.584500 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.584518 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.584534 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:18Z","lastTransitionTime":"2026-01-25T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.687343 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.687401 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.687420 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.687482 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.687503 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:18Z","lastTransitionTime":"2026-01-25T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.790321 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.790393 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.790418 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.790450 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.790472 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:18Z","lastTransitionTime":"2026-01-25T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.893909 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.893960 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.893977 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.893999 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.894016 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:18Z","lastTransitionTime":"2026-01-25T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.997444 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.997508 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.997525 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.997550 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:18 crc kubenswrapper[4985]: I0125 00:07:18.997568 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:18Z","lastTransitionTime":"2026-01-25T00:07:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.101027 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.101081 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.101097 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.101153 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.101188 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:19Z","lastTransitionTime":"2026-01-25T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.204232 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.204281 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.204299 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.204325 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.204341 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:19Z","lastTransitionTime":"2026-01-25T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.274469 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.274576 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.274482 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:19 crc kubenswrapper[4985]: E0125 00:07:19.274760 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:19 crc kubenswrapper[4985]: E0125 00:07:19.274886 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:19 crc kubenswrapper[4985]: E0125 00:07:19.275034 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.308663 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.308718 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.308736 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.308759 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.308776 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:19Z","lastTransitionTime":"2026-01-25T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.327168 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 03:34:15.027016775 +0000 UTC
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.413083 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.413211 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.413231 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.413292 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.413310 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:19Z","lastTransitionTime":"2026-01-25T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.516719 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.517176 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.517344 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.517501 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.517674 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:19Z","lastTransitionTime":"2026-01-25T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.621138 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.621490 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.621664 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.621864 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.622029 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:19Z","lastTransitionTime":"2026-01-25T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.725313 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.725363 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.725379 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.725401 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.725419 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:19Z","lastTransitionTime":"2026-01-25T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.829207 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.829270 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.829292 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.829315 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.829333 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:19Z","lastTransitionTime":"2026-01-25T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.932150 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.932227 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.932246 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.932269 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 25 00:07:19 crc kubenswrapper[4985]: I0125 00:07:19.932288 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:19Z","lastTransitionTime":"2026-01-25T00:07:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.035569 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.035655 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.035678 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.035708 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.035731 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:20Z","lastTransitionTime":"2026-01-25T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.117683 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs\") pod \"network-metrics-daemon-cqtvp\" (UID: \"39723ce0-614f-4ada-9cc7-6efe79c7e51c\") " pod="openshift-multus/network-metrics-daemon-cqtvp"
Jan 25 00:07:20 crc kubenswrapper[4985]: E0125 00:07:20.117912 4985 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 25 00:07:20 crc kubenswrapper[4985]: E0125 00:07:20.117982 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs podName:39723ce0-614f-4ada-9cc7-6efe79c7e51c nodeName:}" failed. No retries permitted until 2026-01-25 00:07:36.117958856 +0000 UTC m=+66.149895159 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs") pod "network-metrics-daemon-cqtvp" (UID: "39723ce0-614f-4ada-9cc7-6efe79c7e51c") : object "openshift-multus"/"metrics-daemon-secret" not registered
Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.138387 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.138429 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.138442 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.138459 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.138472 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:20Z","lastTransitionTime":"2026-01-25T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.240905 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.240964 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.240983 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.241007 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.241025 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:20Z","lastTransitionTime":"2026-01-25T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.274752 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp"
Jan 25 00:07:20 crc kubenswrapper[4985]: E0125 00:07:20.275034 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.296053 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.312501 4985 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 25 
00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.327985 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 23:32:30.163680615 +0000 UTC Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.332790 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.343906 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.343967 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.343985 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.344010 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.344029 4985 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:20Z","lastTransitionTime":"2026-01-25T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.358636 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.387818 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.413954 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-25T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.431583 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqtvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39723ce0-614f-4ada-9cc7-6efe79c7e51c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqtvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.447352 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:20 crc 
kubenswrapper[4985]: I0125 00:07:20.447408 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.447425 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.447453 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.447473 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:20Z","lastTransitionTime":"2026-01-25T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.455206 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\
":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for 
client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.477512 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.496136 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.516022 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.549667 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d159d4ffde7e84106d538217de84c5ab52f1ca8d
4e480e874b535d7f0c5ef3d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d159d4ffde7e84106d538217de84c5ab52f1ca8d4e480e874b535d7f0c5ef3d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:16Z\\\",\\\"message\\\":\\\"operator/iptables-alerter-4ln5h\\\\nI0125 00:07:16.475093 6651 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0125 00:07:16.475049 6651 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-cc28q\\\\nI0125 00:07:16.475031 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0125 00:07:16.475146 6651 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0125 00:07:16.475018 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j after 0 failed attempt(s)\\\\nI0125 00:07:16.475161 6651 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j\\\\nI0125 00:07:16.475146 6651 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-cc28q in node crc\\\\nI0125 00:07:16.475179 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-cc28q after 0 failed attempt(s)\\\\nI0125 00:07:16.475188 6651 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-cc28q\\\\nI0125 00:07:16.474974 6651 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:07:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-cc28q_openshift-ovn-kubernetes(64cc3123-ba76-4365-86ae-c4cf7c09a805)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.550584 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.550645 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.550668 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.550702 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.550723 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:20Z","lastTransitionTime":"2026-01-25T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.569219 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.594272 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.615759 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.634512 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fa8dd6d7ac6ffa3214a1a6a4539d230445f1ffbf1989f0fb77be31eacde7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769742294a1ffd4551af4b1045e310ed6ab47e9254c01b9b251d6cafe0b48a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\
\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l284j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:20Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.654349 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.654533 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.654695 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.654883 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.655038 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:20Z","lastTransitionTime":"2026-01-25T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.757868 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.757940 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.757959 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.757984 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.758003 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:20Z","lastTransitionTime":"2026-01-25T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.861182 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.861238 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.861255 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.861283 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.861301 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:20Z","lastTransitionTime":"2026-01-25T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.964214 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.964276 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.964293 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.964317 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:20 crc kubenswrapper[4985]: I0125 00:07:20.964334 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:20Z","lastTransitionTime":"2026-01-25T00:07:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.067071 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.067173 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.067198 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.067226 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.067243 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:21Z","lastTransitionTime":"2026-01-25T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.169699 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.169753 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.169770 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.169795 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.169820 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:21Z","lastTransitionTime":"2026-01-25T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.273042 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.273175 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.273202 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.273234 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.273258 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:21Z","lastTransitionTime":"2026-01-25T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.273698 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.273770 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.273705 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:21 crc kubenswrapper[4985]: E0125 00:07:21.273901 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:21 crc kubenswrapper[4985]: E0125 00:07:21.274016 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:21 crc kubenswrapper[4985]: E0125 00:07:21.274147 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.328716 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 20:36:11.037348981 +0000 UTC Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.331186 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.331311 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.331362 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.331426 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.331471 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:21 crc kubenswrapper[4985]: E0125 00:07:21.331593 4985 configmap.go:193] 
Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 25 00:07:21 crc kubenswrapper[4985]: E0125 00:07:21.331619 4985 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 25 00:07:21 crc kubenswrapper[4985]: E0125 00:07:21.331624 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:07:53.331569611 +0000 UTC m=+83.363505934 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:07:21 crc kubenswrapper[4985]: E0125 00:07:21.331679 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 25 00:07:21 crc kubenswrapper[4985]: E0125 00:07:21.331705 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-25 00:07:53.331685874 +0000 UTC m=+83.363622177 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 25 00:07:21 crc kubenswrapper[4985]: E0125 00:07:21.331718 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 25 00:07:21 crc kubenswrapper[4985]: E0125 00:07:21.331732 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-25 00:07:53.331720095 +0000 UTC m=+83.363656408 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 25 00:07:21 crc kubenswrapper[4985]: E0125 00:07:21.331734 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 25 00:07:21 crc kubenswrapper[4985]: E0125 00:07:21.331778 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 25 00:07:21 crc kubenswrapper[4985]: E0125 00:07:21.331802 4985 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:07:21 crc kubenswrapper[4985]: E0125 00:07:21.331742 4985 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:07:21 crc kubenswrapper[4985]: E0125 00:07:21.331886 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-25 00:07:53.331857289 +0000 UTC m=+83.363793592 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:07:21 crc kubenswrapper[4985]: E0125 00:07:21.331956 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-25 00:07:53.331936991 +0000 UTC m=+83.363873304 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.376615 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.376679 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.376699 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.376732 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.376750 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:21Z","lastTransitionTime":"2026-01-25T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.479838 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.479893 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.479915 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.479937 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.479954 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:21Z","lastTransitionTime":"2026-01-25T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.583280 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.583326 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.583342 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.583366 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.583383 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:21Z","lastTransitionTime":"2026-01-25T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.686972 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.687033 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.687050 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.687075 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.687098 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:21Z","lastTransitionTime":"2026-01-25T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.790442 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.790506 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.790523 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.790583 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.790603 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:21Z","lastTransitionTime":"2026-01-25T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.893950 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.894011 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.894028 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.894054 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.894074 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:21Z","lastTransitionTime":"2026-01-25T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.997294 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.997390 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.997408 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.997435 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:21 crc kubenswrapper[4985]: I0125 00:07:21.997452 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:21Z","lastTransitionTime":"2026-01-25T00:07:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.100525 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.100589 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.100607 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.100634 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.100655 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:22Z","lastTransitionTime":"2026-01-25T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.203458 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.203546 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.203571 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.203602 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.203623 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:22Z","lastTransitionTime":"2026-01-25T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.274792 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:22 crc kubenswrapper[4985]: E0125 00:07:22.275004 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.306542 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.306602 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.306619 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.306643 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.306661 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:22Z","lastTransitionTime":"2026-01-25T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.329915 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 15:38:46.646357276 +0000 UTC Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.409740 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.409780 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.409802 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.409826 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.409846 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:22Z","lastTransitionTime":"2026-01-25T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.513246 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.513319 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.513332 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.513348 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.513360 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:22Z","lastTransitionTime":"2026-01-25T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.616527 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.616580 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.616596 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.616618 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.616638 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:22Z","lastTransitionTime":"2026-01-25T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.719839 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.719899 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.719921 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.719949 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.719970 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:22Z","lastTransitionTime":"2026-01-25T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.823282 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.823343 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.823365 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.823395 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.823417 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:22Z","lastTransitionTime":"2026-01-25T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.926668 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.926735 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.926758 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.926788 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:22 crc kubenswrapper[4985]: I0125 00:07:22.926818 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:22Z","lastTransitionTime":"2026-01-25T00:07:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.029824 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.029859 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.029871 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.029888 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.029904 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:23Z","lastTransitionTime":"2026-01-25T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.132266 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.132329 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.132347 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.132369 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.132386 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:23Z","lastTransitionTime":"2026-01-25T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.191444 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.204613 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.210481 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fa8dd6d7ac6ffa3214a1a6a4539d230445f1ffbf1989f0fb77be31eacde7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769742294a1ffd4551af4b1045e310ed6ab47e9254c01b9b251d6cafe0b48a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\
",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l284j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.228498 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.235521 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.235599 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.235624 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.235653 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.235674 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:23Z","lastTransitionTime":"2026-01-25T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.248875 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.271015 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.273742 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.273820 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.273827 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:23 crc kubenswrapper[4985]: E0125 00:07:23.274057 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:23 crc kubenswrapper[4985]: E0125 00:07:23.274189 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:23 crc kubenswrapper[4985]: E0125 00:07:23.273952 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.289082 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.313073 4985 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb522691302
8822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.330532 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 02:14:32.692239792 +0000 UTC Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.330581 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.340758 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.340809 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.340826 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.340849 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.340867 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:23Z","lastTransitionTime":"2026-01-25T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.347180 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.367215 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.392316 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-25T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.408512 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqtvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39723ce0-614f-4ada-9cc7-6efe79c7e51c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqtvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.428464 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cl
uster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.444052 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.444141 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.444166 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.444198 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.444219 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:23Z","lastTransitionTime":"2026-01-25T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.448859 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.469076 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.496407 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d159d4ffde7e84106d538217de84c5ab52f1ca8d
4e480e874b535d7f0c5ef3d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d159d4ffde7e84106d538217de84c5ab52f1ca8d4e480e874b535d7f0c5ef3d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:16Z\\\",\\\"message\\\":\\\"operator/iptables-alerter-4ln5h\\\\nI0125 00:07:16.475093 6651 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0125 00:07:16.475049 6651 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-cc28q\\\\nI0125 00:07:16.475031 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0125 00:07:16.475146 6651 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0125 00:07:16.475018 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j after 0 failed attempt(s)\\\\nI0125 00:07:16.475161 6651 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j\\\\nI0125 00:07:16.475146 6651 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-cc28q in node crc\\\\nI0125 00:07:16.475179 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-cc28q after 0 failed attempt(s)\\\\nI0125 00:07:16.475188 6651 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-cc28q\\\\nI0125 00:07:16.474974 6651 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:07:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-cc28q_openshift-ovn-kubernetes(64cc3123-ba76-4365-86ae-c4cf7c09a805)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.512974 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.547664 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.547756 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.547774 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.547797 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.547814 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:23Z","lastTransitionTime":"2026-01-25T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.650081 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.650221 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.650240 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.650262 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.650280 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:23Z","lastTransitionTime":"2026-01-25T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.753004 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.753076 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.753100 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.753172 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.753197 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:23Z","lastTransitionTime":"2026-01-25T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.856184 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.856255 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.856276 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.856302 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.856322 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:23Z","lastTransitionTime":"2026-01-25T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.933376 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.933481 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.933501 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.933525 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.933541 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:23Z","lastTransitionTime":"2026-01-25T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:23 crc kubenswrapper[4985]: E0125 00:07:23.955739 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.960995 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.961080 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.961098 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.961144 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.961163 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:23Z","lastTransitionTime":"2026-01-25T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:23 crc kubenswrapper[4985]: E0125 00:07:23.981176 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.985796 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.985865 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.985882 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.985907 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:23 crc kubenswrapper[4985]: I0125 00:07:23.985925 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:23Z","lastTransitionTime":"2026-01-25T00:07:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:24 crc kubenswrapper[4985]: E0125 00:07:24.000973 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:23Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.006355 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.006429 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.006455 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.006487 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.006514 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:24Z","lastTransitionTime":"2026-01-25T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:24 crc kubenswrapper[4985]: E0125 00:07:24.026218 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.030179 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.030223 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.030234 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.030248 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.030261 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:24Z","lastTransitionTime":"2026-01-25T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:24 crc kubenswrapper[4985]: E0125 00:07:24.044872 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:24Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:24 crc kubenswrapper[4985]: E0125 00:07:24.045028 4985 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.047498 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.047550 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.047569 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.047591 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.047606 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:24Z","lastTransitionTime":"2026-01-25T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.150414 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.150471 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.150488 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.150511 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.150530 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:24Z","lastTransitionTime":"2026-01-25T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.253363 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.253440 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.253463 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.253493 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.253515 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:24Z","lastTransitionTime":"2026-01-25T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.274321 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:24 crc kubenswrapper[4985]: E0125 00:07:24.274517 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.331445 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 14:32:30.037981105 +0000 UTC Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.355861 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.355910 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.355927 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.355947 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.355965 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:24Z","lastTransitionTime":"2026-01-25T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.459910 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.460011 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.460277 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.460321 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.460340 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:24Z","lastTransitionTime":"2026-01-25T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.563575 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.563628 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.563646 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.563670 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.563687 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:24Z","lastTransitionTime":"2026-01-25T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.693215 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.693275 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.693294 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.693317 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.693329 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:24Z","lastTransitionTime":"2026-01-25T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.796559 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.796644 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.796662 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.796691 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.796716 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:24Z","lastTransitionTime":"2026-01-25T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.900240 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.900306 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.900321 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.900345 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:24 crc kubenswrapper[4985]: I0125 00:07:24.900361 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:24Z","lastTransitionTime":"2026-01-25T00:07:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.003348 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.003407 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.003419 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.003435 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.003447 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:25Z","lastTransitionTime":"2026-01-25T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.105793 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.105865 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.105914 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.105953 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.105986 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:25Z","lastTransitionTime":"2026-01-25T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.209215 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.209256 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.209267 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.209281 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.209292 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:25Z","lastTransitionTime":"2026-01-25T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.274366 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.274366 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.274560 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:25 crc kubenswrapper[4985]: E0125 00:07:25.274754 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:25 crc kubenswrapper[4985]: E0125 00:07:25.274974 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:25 crc kubenswrapper[4985]: E0125 00:07:25.275184 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.312802 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.312865 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.312882 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.312909 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.312928 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:25Z","lastTransitionTime":"2026-01-25T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.332412 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 11:21:16.272651062 +0000 UTC Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.415748 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.415817 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.415841 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.415872 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.415896 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:25Z","lastTransitionTime":"2026-01-25T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.519849 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.519924 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.519941 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.519968 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.519984 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:25Z","lastTransitionTime":"2026-01-25T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.623613 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.623691 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.623708 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.623732 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.623750 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:25Z","lastTransitionTime":"2026-01-25T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.728808 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.728859 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.728871 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.728890 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.728904 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:25Z","lastTransitionTime":"2026-01-25T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.832520 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.832557 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.832567 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.832583 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.832594 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:25Z","lastTransitionTime":"2026-01-25T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.935162 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.935228 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.935250 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.935282 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:25 crc kubenswrapper[4985]: I0125 00:07:25.935303 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:25Z","lastTransitionTime":"2026-01-25T00:07:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.037994 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.038053 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.038070 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.038092 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.038145 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:26Z","lastTransitionTime":"2026-01-25T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.140747 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.140809 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.140825 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.140849 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.140873 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:26Z","lastTransitionTime":"2026-01-25T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.244641 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.244733 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.244757 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.244793 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.244815 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:26Z","lastTransitionTime":"2026-01-25T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.274547 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:26 crc kubenswrapper[4985]: E0125 00:07:26.274853 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.332646 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 14:19:15.59467849 +0000 UTC Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.348170 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.348234 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.348251 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.348279 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.348302 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:26Z","lastTransitionTime":"2026-01-25T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.452483 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.452535 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.452551 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.452572 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.452591 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:26Z","lastTransitionTime":"2026-01-25T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.556022 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.556154 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.556174 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.556199 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.556216 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:26Z","lastTransitionTime":"2026-01-25T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.659664 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.659788 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.659816 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.659846 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.659869 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:26Z","lastTransitionTime":"2026-01-25T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.762733 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.762792 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.762809 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.762834 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.762851 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:26Z","lastTransitionTime":"2026-01-25T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.865667 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.865728 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.865745 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.865807 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.865827 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:26Z","lastTransitionTime":"2026-01-25T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.969203 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.969266 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.969283 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.969308 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:26 crc kubenswrapper[4985]: I0125 00:07:26.969477 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:26Z","lastTransitionTime":"2026-01-25T00:07:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.073038 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.073151 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.073172 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.073200 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.073219 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:27Z","lastTransitionTime":"2026-01-25T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.176457 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.176516 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.176533 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.176555 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.176572 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:27Z","lastTransitionTime":"2026-01-25T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.273900 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.273966 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.273935 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:27 crc kubenswrapper[4985]: E0125 00:07:27.274177 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:27 crc kubenswrapper[4985]: E0125 00:07:27.274316 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:27 crc kubenswrapper[4985]: E0125 00:07:27.274602 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.279410 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.279483 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.279512 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.279544 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.279569 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:27Z","lastTransitionTime":"2026-01-25T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.333140 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 03:05:48.962045957 +0000 UTC Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.382339 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.382375 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.382386 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.382399 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.382410 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:27Z","lastTransitionTime":"2026-01-25T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.486021 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.486157 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.486183 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.486212 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.486234 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:27Z","lastTransitionTime":"2026-01-25T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.589599 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.589644 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.589655 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.589672 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.589684 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:27Z","lastTransitionTime":"2026-01-25T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.692223 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.692284 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.692302 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.692326 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.692344 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:27Z","lastTransitionTime":"2026-01-25T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.795313 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.795443 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.795505 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.795532 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.795549 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:27Z","lastTransitionTime":"2026-01-25T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.898334 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.898399 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.898424 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.898455 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:27 crc kubenswrapper[4985]: I0125 00:07:27.898478 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:27Z","lastTransitionTime":"2026-01-25T00:07:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.002070 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.002182 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.002206 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.002237 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.002259 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:28Z","lastTransitionTime":"2026-01-25T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.105728 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.105825 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.105843 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.105866 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.105886 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:28Z","lastTransitionTime":"2026-01-25T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.209377 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.209443 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.209461 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.209517 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.209535 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:28Z","lastTransitionTime":"2026-01-25T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.274665 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:28 crc kubenswrapper[4985]: E0125 00:07:28.274856 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.312545 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.312607 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.312624 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.312650 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.312668 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:28Z","lastTransitionTime":"2026-01-25T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.333725 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 08:22:40.669022936 +0000 UTC Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.415949 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.416068 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.416172 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.416208 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.416229 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:28Z","lastTransitionTime":"2026-01-25T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.519412 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.519487 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.519505 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.519530 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.519547 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:28Z","lastTransitionTime":"2026-01-25T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.623341 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.623451 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.623503 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.623569 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.623625 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:28Z","lastTransitionTime":"2026-01-25T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.726741 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.726807 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.726826 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.726851 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.726870 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:28Z","lastTransitionTime":"2026-01-25T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.830391 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.830902 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.830934 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.831018 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.831086 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:28Z","lastTransitionTime":"2026-01-25T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.933937 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.934020 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.934046 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.934083 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:28 crc kubenswrapper[4985]: I0125 00:07:28.934144 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:28Z","lastTransitionTime":"2026-01-25T00:07:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.036871 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.036939 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.036959 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.036985 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.037002 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:29Z","lastTransitionTime":"2026-01-25T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.140458 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.140532 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.140563 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.140595 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.140616 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:29Z","lastTransitionTime":"2026-01-25T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.244102 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.244193 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.244211 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.244246 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.244265 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:29Z","lastTransitionTime":"2026-01-25T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.274309 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.274309 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:29 crc kubenswrapper[4985]: E0125 00:07:29.274478 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.274326 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:29 crc kubenswrapper[4985]: E0125 00:07:29.274622 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:29 crc kubenswrapper[4985]: E0125 00:07:29.274708 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.334467 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 15:24:16.591781858 +0000 UTC Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.347089 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.347173 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.347190 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.347214 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.347233 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:29Z","lastTransitionTime":"2026-01-25T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.450199 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.450266 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.450289 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.450320 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.450341 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:29Z","lastTransitionTime":"2026-01-25T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.553845 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.553910 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.553928 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.553951 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.553968 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:29Z","lastTransitionTime":"2026-01-25T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.656998 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.657050 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.657068 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.657091 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.657143 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:29Z","lastTransitionTime":"2026-01-25T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.760803 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.760864 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.760881 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.760903 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.760921 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:29Z","lastTransitionTime":"2026-01-25T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.864910 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.865082 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.865159 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.865188 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.865205 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:29Z","lastTransitionTime":"2026-01-25T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.968584 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.968649 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.968667 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.968690 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:29 crc kubenswrapper[4985]: I0125 00:07:29.968709 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:29Z","lastTransitionTime":"2026-01-25T00:07:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.072197 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.072253 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.072271 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.072293 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.072310 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:30Z","lastTransitionTime":"2026-01-25T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.175195 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.175272 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.175295 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.175324 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.175346 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:30Z","lastTransitionTime":"2026-01-25T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.273849 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:30 crc kubenswrapper[4985]: E0125 00:07:30.274148 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.279297 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.279342 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.279360 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.279383 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.279400 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:30Z","lastTransitionTime":"2026-01-25T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.302202 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.322690 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.334652 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 16:16:44.712728865 +0000 UTC Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.342949 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:30 crc kubenswrapper[4985]: 
I0125 00:07:30.363671 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.381455 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.381508 4985 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.381524 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.381545 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.381566 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:30Z","lastTransitionTime":"2026-01-25T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.395823 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\
"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d4
6a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run
/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.414723 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqtvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39723ce0-614f-4ada-9cc7-6efe79c7e51c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqtvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.439205 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.459647 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.477017 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.484069 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.484178 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.484252 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.484285 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.484307 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:30Z","lastTransitionTime":"2026-01-25T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.498555 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\
\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.530901 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/r
un/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d159d4ffde7e84106d538217de84c5ab52f1ca8d4e480e874b535d7f0c5ef3d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d159d4ffde7e84106d538217de84c5ab52f1ca8d4e480e874b535d7f0c5ef3d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:16Z\\\",\\\"message\\\":\\\"operator/iptables-alerter-4ln5h\\\\nI0125 00:07:16.475093 6651 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0125 00:07:16.475049 6651 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-cc28q\\\\nI0125 00:07:16.475031 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0125 00:07:16.475146 6651 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0125 00:07:16.475018 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j after 0 failed attempt(s)\\\\nI0125 00:07:16.475161 6651 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j\\\\nI0125 00:07:16.475146 6651 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-cc28q in node crc\\\\nI0125 00:07:16.475179 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-cc28q after 0 failed attempt(s)\\\\nI0125 00:07:16.475188 6651 
default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-cc28q\\\\nI0125 00:07:16.474974 6651 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:07:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-cc28q_openshift-ovn-kubernetes(64cc3123-ba76-4365-86ae-c4cf7c09a805)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPat
h\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.548587 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.569188 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.587227 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.587300 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.587324 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.587356 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.587380 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:30Z","lastTransitionTime":"2026-01-25T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.588915 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"effda373-9234-4f45-83f4-a07522fa05a7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebc8ef5ed458ecf36d2dbca7f31efb3cdd30a8c6f652f2bc4adfe01908061cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66146e1867d01422601c070ea10822d47529df9fba22de06fec14fc0a8124455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://576bfc5316d868ae8a28a9a77121be53f290b6d04dbf72bc22435f12ecd99ae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef1d5d8207817b57bc382cfb3b403f325cc1e158d53b4f0980130e3c04613f25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef1d5d8207817b57bc382cfb3b403f325cc1e158d53b4f0980130e3c04613f25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.608326 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.629992 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.649190 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fa8dd6d7ac6ffa3214a1a6a4539d230445f1ffbf1989f0fb77be31eacde7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769742294a1ffd4551af4b1045e310ed6ab47e9254c01b9b251d6cafe0b48a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2
099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l284j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:30Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.690427 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.690482 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.690500 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.690523 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.690540 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:30Z","lastTransitionTime":"2026-01-25T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.793637 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.793684 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.793693 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.793708 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.793717 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:30Z","lastTransitionTime":"2026-01-25T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.897620 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.897678 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.897691 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.897709 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:30 crc kubenswrapper[4985]: I0125 00:07:30.897725 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:30Z","lastTransitionTime":"2026-01-25T00:07:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.001008 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.001054 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.001068 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.001088 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.001100 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:31Z","lastTransitionTime":"2026-01-25T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.103470 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.103733 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.103873 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.104008 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.104216 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:31Z","lastTransitionTime":"2026-01-25T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.207270 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.207607 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.207698 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.207799 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.207907 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:31Z","lastTransitionTime":"2026-01-25T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.274377 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.274485 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:31 crc kubenswrapper[4985]: E0125 00:07:31.274587 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:31 crc kubenswrapper[4985]: E0125 00:07:31.274854 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.275053 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:31 crc kubenswrapper[4985]: E0125 00:07:31.275786 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.311286 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.311342 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.311358 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.311382 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.311400 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:31Z","lastTransitionTime":"2026-01-25T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.334805 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 03:51:58.937337453 +0000 UTC Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.413731 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.413794 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.413811 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.413833 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.413850 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:31Z","lastTransitionTime":"2026-01-25T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.516422 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.516467 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.516479 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.516495 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.516507 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:31Z","lastTransitionTime":"2026-01-25T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.619815 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.619879 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.619896 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.619921 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.619939 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:31Z","lastTransitionTime":"2026-01-25T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.723230 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.723282 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.723307 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.723332 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.723350 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:31Z","lastTransitionTime":"2026-01-25T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.826369 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.826407 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.826420 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.826437 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.826450 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:31Z","lastTransitionTime":"2026-01-25T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.928618 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.928660 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.928673 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.928688 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:31 crc kubenswrapper[4985]: I0125 00:07:31.928699 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:31Z","lastTransitionTime":"2026-01-25T00:07:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.031306 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.031368 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.031385 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.031412 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.031435 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:32Z","lastTransitionTime":"2026-01-25T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.135249 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.135361 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.135384 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.135825 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.135909 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:32Z","lastTransitionTime":"2026-01-25T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.239181 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.239242 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.239260 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.239283 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.239307 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:32Z","lastTransitionTime":"2026-01-25T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.275530 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:32 crc kubenswrapper[4985]: E0125 00:07:32.293072 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.294777 4985 scope.go:117] "RemoveContainer" containerID="d159d4ffde7e84106d538217de84c5ab52f1ca8d4e480e874b535d7f0c5ef3d4" Jan 25 00:07:32 crc kubenswrapper[4985]: E0125 00:07:32.294981 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-cc28q_openshift-ovn-kubernetes(64cc3123-ba76-4365-86ae-c4cf7c09a805)\"" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.335064 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 12:57:26.937182454 +0000 UTC Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.342511 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.342555 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.342573 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.342600 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.342618 4985 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:32Z","lastTransitionTime":"2026-01-25T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.445415 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.445462 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.445478 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.445500 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.445517 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:32Z","lastTransitionTime":"2026-01-25T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.548968 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.549009 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.549025 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.549046 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.549063 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:32Z","lastTransitionTime":"2026-01-25T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.652392 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.652436 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.652453 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.652474 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.652491 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:32Z","lastTransitionTime":"2026-01-25T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.754932 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.754975 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.754985 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.755000 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.755011 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:32Z","lastTransitionTime":"2026-01-25T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.857915 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.857953 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.857963 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.857978 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.857988 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:32Z","lastTransitionTime":"2026-01-25T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.960551 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.960581 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.960591 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.960603 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:32 crc kubenswrapper[4985]: I0125 00:07:32.960611 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:32Z","lastTransitionTime":"2026-01-25T00:07:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.062654 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.062712 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.062727 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.062746 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.063096 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:33Z","lastTransitionTime":"2026-01-25T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.165659 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.165706 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.165722 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.165741 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.165753 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:33Z","lastTransitionTime":"2026-01-25T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.268847 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.268911 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.268934 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.268962 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.268984 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:33Z","lastTransitionTime":"2026-01-25T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.274177 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.274288 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.274183 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:33 crc kubenswrapper[4985]: E0125 00:07:33.274336 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:33 crc kubenswrapper[4985]: E0125 00:07:33.274465 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:33 crc kubenswrapper[4985]: E0125 00:07:33.274544 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.336238 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 04:18:58.354957593 +0000 UTC Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.370792 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.370851 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.370868 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.370894 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.370911 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:33Z","lastTransitionTime":"2026-01-25T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.473420 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.473489 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.473508 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.473533 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.473551 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:33Z","lastTransitionTime":"2026-01-25T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.576686 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.576745 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.576761 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.576784 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.576801 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:33Z","lastTransitionTime":"2026-01-25T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.679768 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.679802 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.679810 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.679822 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.679830 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:33Z","lastTransitionTime":"2026-01-25T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.783089 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.783207 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.783230 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.783259 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.783280 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:33Z","lastTransitionTime":"2026-01-25T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.886423 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.886476 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.886665 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.886686 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.886699 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:33Z","lastTransitionTime":"2026-01-25T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.989853 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.989897 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.989908 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.989924 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:33 crc kubenswrapper[4985]: I0125 00:07:33.989939 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:33Z","lastTransitionTime":"2026-01-25T00:07:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.092453 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.092499 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.092509 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.092523 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.092535 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:34Z","lastTransitionTime":"2026-01-25T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.195797 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.195845 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.195856 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.195872 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.195885 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:34Z","lastTransitionTime":"2026-01-25T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.203720 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.203798 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.203825 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.203855 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.203879 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:34Z","lastTransitionTime":"2026-01-25T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:34 crc kubenswrapper[4985]: E0125 00:07:34.219261 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.223398 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.223450 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.223470 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.223491 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.223507 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:34Z","lastTransitionTime":"2026-01-25T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:34 crc kubenswrapper[4985]: E0125 00:07:34.243818 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.249194 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.249272 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.249286 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.249306 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.249321 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:34Z","lastTransitionTime":"2026-01-25T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:34 crc kubenswrapper[4985]: E0125 00:07:34.266043 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.271323 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.271400 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.271421 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.271448 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.271469 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:34Z","lastTransitionTime":"2026-01-25T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.273777 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:34 crc kubenswrapper[4985]: E0125 00:07:34.273902 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:34 crc kubenswrapper[4985]: E0125 00:07:34.286209 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.289818 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.289866 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.289879 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.289939 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.289951 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:34Z","lastTransitionTime":"2026-01-25T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:34 crc kubenswrapper[4985]: E0125 00:07:34.302461 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:34Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:34Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:34 crc kubenswrapper[4985]: E0125 00:07:34.302611 4985 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.304146 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.304179 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.304188 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.304201 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.304212 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:34Z","lastTransitionTime":"2026-01-25T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.336335 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 16:22:14.916205585 +0000 UTC Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.406491 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.406538 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.406549 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.406564 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.406575 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:34Z","lastTransitionTime":"2026-01-25T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.509232 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.509357 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.509425 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.509452 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.509525 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:34Z","lastTransitionTime":"2026-01-25T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.611894 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.611930 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.611941 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.611958 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.611971 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:34Z","lastTransitionTime":"2026-01-25T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.713699 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.713720 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.713728 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.713740 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.713749 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:34Z","lastTransitionTime":"2026-01-25T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.816351 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.816380 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.816392 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.816407 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.816418 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:34Z","lastTransitionTime":"2026-01-25T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.919347 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.919387 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.919397 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.919411 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:34 crc kubenswrapper[4985]: I0125 00:07:34.919423 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:34Z","lastTransitionTime":"2026-01-25T00:07:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.022131 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.022175 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.022188 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.022203 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.022214 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:35Z","lastTransitionTime":"2026-01-25T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.125259 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.125335 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.125353 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.125377 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.125394 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:35Z","lastTransitionTime":"2026-01-25T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.228319 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.228352 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.228367 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.228382 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.228396 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:35Z","lastTransitionTime":"2026-01-25T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.273819 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.273878 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.273940 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:35 crc kubenswrapper[4985]: E0125 00:07:35.273989 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:35 crc kubenswrapper[4985]: E0125 00:07:35.274082 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:35 crc kubenswrapper[4985]: E0125 00:07:35.274268 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.330464 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.330519 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.330536 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.330561 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.330578 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:35Z","lastTransitionTime":"2026-01-25T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.336800 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 19:00:19.781844131 +0000 UTC Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.433255 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.433795 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.433837 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.433867 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.433889 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:35Z","lastTransitionTime":"2026-01-25T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.536384 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.536428 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.536444 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.536465 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.536480 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:35Z","lastTransitionTime":"2026-01-25T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.638465 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.638534 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.638546 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.638565 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.638597 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:35Z","lastTransitionTime":"2026-01-25T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.741743 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.741818 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.741841 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.741915 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.741941 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:35Z","lastTransitionTime":"2026-01-25T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.845052 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.845169 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.845197 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.845317 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.845382 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:35Z","lastTransitionTime":"2026-01-25T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.948568 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.948613 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.948630 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.948653 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:35 crc kubenswrapper[4985]: I0125 00:07:35.948671 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:35Z","lastTransitionTime":"2026-01-25T00:07:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.050753 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.050782 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.050792 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.050805 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.050814 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:36Z","lastTransitionTime":"2026-01-25T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.122887 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs\") pod \"network-metrics-daemon-cqtvp\" (UID: \"39723ce0-614f-4ada-9cc7-6efe79c7e51c\") " pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:36 crc kubenswrapper[4985]: E0125 00:07:36.123067 4985 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 25 00:07:36 crc kubenswrapper[4985]: E0125 00:07:36.123181 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs podName:39723ce0-614f-4ada-9cc7-6efe79c7e51c nodeName:}" failed. No retries permitted until 2026-01-25 00:08:08.123161157 +0000 UTC m=+98.155097500 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs") pod "network-metrics-daemon-cqtvp" (UID: "39723ce0-614f-4ada-9cc7-6efe79c7e51c") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.153779 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.153877 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.153892 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.153911 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.153923 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:36Z","lastTransitionTime":"2026-01-25T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.256862 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.256917 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.256935 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.256958 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.256974 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:36Z","lastTransitionTime":"2026-01-25T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.274557 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:36 crc kubenswrapper[4985]: E0125 00:07:36.274952 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.337337 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 11:04:40.850085047 +0000 UTC Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.359748 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.359787 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.359803 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.359825 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.359843 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:36Z","lastTransitionTime":"2026-01-25T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.461795 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.461854 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.461871 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.461897 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.461914 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:36Z","lastTransitionTime":"2026-01-25T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.564587 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.564628 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.564639 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.564652 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.564663 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:36Z","lastTransitionTime":"2026-01-25T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.667681 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.668020 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.668031 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.668043 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.668052 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:36Z","lastTransitionTime":"2026-01-25T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.755697 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4w9l7_0294dfed-64df-4d3c-92de-7a93787780a2/kube-multus/0.log" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.755750 4985 generic.go:334] "Generic (PLEG): container finished" podID="0294dfed-64df-4d3c-92de-7a93787780a2" containerID="2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473" exitCode=1 Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.755780 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4w9l7" event={"ID":"0294dfed-64df-4d3c-92de-7a93787780a2","Type":"ContainerDied","Data":"2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473"} Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.756252 4985 scope.go:117] "RemoveContainer" containerID="2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.769757 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.769804 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.769814 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.769827 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.769837 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:36Z","lastTransitionTime":"2026-01-25T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.780526 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.795580 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.806250 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.818358 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:36Z\\\",\\\"message\\\":\\\"2026-01-25T00:06:51+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_6db04f53-5cb0-4660-8c3e-d70eb8d04115\\\\n2026-01-25T00:06:51+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6db04f53-5cb0-4660-8c3e-d70eb8d04115 to /host/opt/cni/bin/\\\\n2026-01-25T00:06:51Z [verbose] multus-daemon started\\\\n2026-01-25T00:06:51Z [verbose] Readiness Indicator file check\\\\n2026-01-25T00:07:36Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the 
condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.833196 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.844436 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqtvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39723ce0-614f-4ada-9cc7-6efe79c7e51c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqtvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.856747 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.868268 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"effda373-9234-4f45-83f4-a07522fa05a7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebc8ef5ed458ecf36d2dbca7f31efb3cdd30a8c6f652f2bc4adfe01908061cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66146e1867d01422601c070ea10822d47529df9fba22de06fec14fc0a8124455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://576bfc5316d868ae8a28a9a77121be53f290b6d04dbf72bc22435f12ecd99ae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef1d5d8207817b57bc382cfb3b403f325cc1e158d53b4f0980130e3c04613f25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef1d5d8207817b57bc382cfb3b403f325cc1e158d53b4f0980130e3c04613f25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.871157 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.871191 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.871199 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.871214 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 
00:07:36.871224 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:36Z","lastTransitionTime":"2026-01-25T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.881008 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.893770 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.919495 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d159d4ffde7e84106d538217de84c5ab52f1ca8d
4e480e874b535d7f0c5ef3d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d159d4ffde7e84106d538217de84c5ab52f1ca8d4e480e874b535d7f0c5ef3d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:16Z\\\",\\\"message\\\":\\\"operator/iptables-alerter-4ln5h\\\\nI0125 00:07:16.475093 6651 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0125 00:07:16.475049 6651 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-cc28q\\\\nI0125 00:07:16.475031 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0125 00:07:16.475146 6651 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0125 00:07:16.475018 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j after 0 failed attempt(s)\\\\nI0125 00:07:16.475161 6651 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j\\\\nI0125 00:07:16.475146 6651 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-cc28q in node crc\\\\nI0125 00:07:16.475179 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-cc28q after 0 failed attempt(s)\\\\nI0125 00:07:16.475188 6651 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-cc28q\\\\nI0125 00:07:16.474974 6651 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:07:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-cc28q_openshift-ovn-kubernetes(64cc3123-ba76-4365-86ae-c4cf7c09a805)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.929773 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.943399 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fa8dd6d7ac6ffa3214a1a6a4539d230445f1ffbf1989f0fb77be31eacde7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769742294a1ffd4551af4b1045e310ed6ab47e9254c01b9b251d6cafe0b48a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\
\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l284j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.958527 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.971229 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.973085 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.973142 4985 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.973155 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.973188 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.973201 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:36Z","lastTransitionTime":"2026-01-25T00:07:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.985132 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-
config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:36 crc kubenswrapper[4985]: I0125 00:07:36.996883 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"ku
be-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:36Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.076216 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.076256 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.076265 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.076280 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.076290 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:37Z","lastTransitionTime":"2026-01-25T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.177836 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.177880 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.177890 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.177909 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.177922 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:37Z","lastTransitionTime":"2026-01-25T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.274230 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.274262 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.274262 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:37 crc kubenswrapper[4985]: E0125 00:07:37.274390 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:37 crc kubenswrapper[4985]: E0125 00:07:37.274597 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:37 crc kubenswrapper[4985]: E0125 00:07:37.274687 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.279758 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.279807 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.279821 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.279837 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.279850 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:37Z","lastTransitionTime":"2026-01-25T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.338314 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 18:13:56.222681809 +0000 UTC Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.382457 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.382491 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.382501 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.382517 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.382527 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:37Z","lastTransitionTime":"2026-01-25T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.484547 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.484577 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.484585 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.484597 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.484605 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:37Z","lastTransitionTime":"2026-01-25T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.586194 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.586239 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.586253 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.586270 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.586280 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:37Z","lastTransitionTime":"2026-01-25T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.688797 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.688836 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.688847 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.688866 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.688876 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:37Z","lastTransitionTime":"2026-01-25T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.760694 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4w9l7_0294dfed-64df-4d3c-92de-7a93787780a2/kube-multus/0.log" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.760742 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4w9l7" event={"ID":"0294dfed-64df-4d3c-92de-7a93787780a2","Type":"ContainerStarted","Data":"37f704152429d54471e85318a5e83ab5aa842441c44fc4e6615bb50d8fe2b03e"} Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.774926 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.787910 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.791679 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.791860 4985 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.791905 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.791978 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.792051 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:37Z","lastTransitionTime":"2026-01-25T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.799757 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-
config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.809521 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"ku
be-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.824509 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.834761 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.848079 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37f704152429d54471e85318a5e83ab5aa842441c44fc4e6615bb50d8fe2b03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:36Z\\\",\\\"message\\\":\\\"2026-01-25T00:06:51+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_6db04f53-5cb0-4660-8c3e-d70eb8d04115\\\\n2026-01-25T00:06:51+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6db04f53-5cb0-4660-8c3e-d70eb8d04115 to /host/opt/cni/bin/\\\\n2026-01-25T00:06:51Z [verbose] multus-daemon started\\\\n2026-01-25T00:06:51Z [verbose] Readiness Indicator file check\\\\n2026-01-25T00:07:36Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.861424 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.870009 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqtvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39723ce0-614f-4ada-9cc7-6efe79c7e51c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqtvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.881569 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.891254 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"effda373-9234-4f45-83f4-a07522fa05a7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebc8ef5ed458ecf36d2dbca7f31efb3cdd30a8c6f652f2bc4adfe01908061cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66146e1867d01422601c070ea10822d47529df9fba22de06fec14fc0a8124455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://576bfc5316d868ae8a28a9a77121be53f290b6d04dbf72bc22435f12ecd99ae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef1d5d8207817b57bc382cfb3b403f325cc1e158d53b4f0980130e3c04613f25\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef1d5d8207817b57bc382cfb3b403f325cc1e158d53b4f0980130e3c04613f25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.894215 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.894257 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.894271 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.894288 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.894302 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:37Z","lastTransitionTime":"2026-01-25T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.901175 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.911953 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.928879 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d159d4ffde7e84106d538217de84c5ab52f1ca8d
4e480e874b535d7f0c5ef3d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d159d4ffde7e84106d538217de84c5ab52f1ca8d4e480e874b535d7f0c5ef3d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:16Z\\\",\\\"message\\\":\\\"operator/iptables-alerter-4ln5h\\\\nI0125 00:07:16.475093 6651 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0125 00:07:16.475049 6651 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-cc28q\\\\nI0125 00:07:16.475031 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0125 00:07:16.475146 6651 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0125 00:07:16.475018 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j after 0 failed attempt(s)\\\\nI0125 00:07:16.475161 6651 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j\\\\nI0125 00:07:16.475146 6651 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-cc28q in node crc\\\\nI0125 00:07:16.475179 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-cc28q after 0 failed attempt(s)\\\\nI0125 00:07:16.475188 6651 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-cc28q\\\\nI0125 00:07:16.474974 6651 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:07:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-cc28q_openshift-ovn-kubernetes(64cc3123-ba76-4365-86ae-c4cf7c09a805)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.937188 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.948635 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-
manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.958327 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fa8dd6d7ac6ffa3214a1a6a4539d230445f1ffbf1989f0fb77be31eacde7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769742294a1ffd4551af4b1045e310ed6ab47e9254c01b9b251d6cafe0b48a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l284j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:37Z is after 2025-08-24T17:21:41Z" Jan 25 
00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.995810 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.995843 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.995854 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.995871 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:37 crc kubenswrapper[4985]: I0125 00:07:37.995883 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:37Z","lastTransitionTime":"2026-01-25T00:07:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.097587 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.097631 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.097642 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.097656 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.097665 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:38Z","lastTransitionTime":"2026-01-25T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.199361 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.199395 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.199406 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.199422 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.199432 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:38Z","lastTransitionTime":"2026-01-25T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.274698 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:38 crc kubenswrapper[4985]: E0125 00:07:38.274797 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.301611 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.301653 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.301662 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.301679 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.301688 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:38Z","lastTransitionTime":"2026-01-25T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.338812 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 11:53:11.919900305 +0000 UTC Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.403452 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.403487 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.403499 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.403515 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.403526 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:38Z","lastTransitionTime":"2026-01-25T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.505510 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.505539 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.505547 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.505558 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.505568 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:38Z","lastTransitionTime":"2026-01-25T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.608215 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.608247 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.608257 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.608271 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.608282 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:38Z","lastTransitionTime":"2026-01-25T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.710878 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.710907 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.710917 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.710930 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.710938 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:38Z","lastTransitionTime":"2026-01-25T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.813688 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.813754 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.813778 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.813807 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.813830 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:38Z","lastTransitionTime":"2026-01-25T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.916675 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.916730 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.916749 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.916779 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:38 crc kubenswrapper[4985]: I0125 00:07:38.916801 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:38Z","lastTransitionTime":"2026-01-25T00:07:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.018482 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.018510 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.018522 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.018537 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.018547 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:39Z","lastTransitionTime":"2026-01-25T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.120972 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.121019 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.121032 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.121047 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.121059 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:39Z","lastTransitionTime":"2026-01-25T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.222719 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.222756 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.222767 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.222783 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.222793 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:39Z","lastTransitionTime":"2026-01-25T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.273544 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.273628 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:39 crc kubenswrapper[4985]: E0125 00:07:39.273745 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.273765 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:39 crc kubenswrapper[4985]: E0125 00:07:39.273848 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:39 crc kubenswrapper[4985]: E0125 00:07:39.273926 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.325225 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.325278 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.325290 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.325309 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.325323 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:39Z","lastTransitionTime":"2026-01-25T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.339400 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 16:25:41.713170403 +0000 UTC Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.427966 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.428008 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.428020 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.428037 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.428049 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:39Z","lastTransitionTime":"2026-01-25T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.529902 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.529946 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.529954 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.529968 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.529978 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:39Z","lastTransitionTime":"2026-01-25T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.632949 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.633020 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.633040 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.633066 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.633084 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:39Z","lastTransitionTime":"2026-01-25T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.735802 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.735863 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.735875 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.735891 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.735906 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:39Z","lastTransitionTime":"2026-01-25T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.837519 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.837556 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.837569 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.837583 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.837596 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:39Z","lastTransitionTime":"2026-01-25T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.940676 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.940721 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.940730 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.940746 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:39 crc kubenswrapper[4985]: I0125 00:07:39.940756 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:39Z","lastTransitionTime":"2026-01-25T00:07:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.046544 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.046610 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.046628 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.046652 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.046669 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:40Z","lastTransitionTime":"2026-01-25T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.149852 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.149930 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.149954 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.149983 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.150002 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:40Z","lastTransitionTime":"2026-01-25T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.252422 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.252460 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.252470 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.252485 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.252495 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:40Z","lastTransitionTime":"2026-01-25T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.274252 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:40 crc kubenswrapper[4985]: E0125 00:07:40.274410 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.287316 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fa8dd6d7ac6ffa3214a1a6a4539d230445f1ffbf1989f0fb77be31eacde7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769742294a1ffd4551af4b1045e310ed6ab47e9254c01b9b251d6cafe0b48a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l284j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 25 
00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.298608 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.311311 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.322464 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.335951 4985 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.339525 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 03:08:41.149574571 +0000 UTC Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.349334 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.355837 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.355870 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.355887 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.355903 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.355913 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:40Z","lastTransitionTime":"2026-01-25T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.364731 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37f704152429d54471e85318a5e83ab5aa842441c44fc4e6615bb50d8fe2b03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:36Z\\\",\\\"message\\\":\\\"2026-01-25T00:06:51+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_6db04f53-5cb0-4660-8c3e-d70eb8d04115\\\\n2026-01-25T00:06:51+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6db04f53-5cb0-4660-8c3e-d70eb8d04115 to /host/opt/cni/bin/\\\\n2026-01-25T00:06:51Z [verbose] multus-daemon started\\\\n2026-01-25T00:06:51Z [verbose] Readiness Indicator file check\\\\n2026-01-25T00:07:36Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.382215 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.395518 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqtvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39723ce0-614f-4ada-9cc7-6efe79c7e51c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqtvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.409912 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.420527 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.432263 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.446839 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.458294 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.458340 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.458353 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.458371 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.458383 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:40Z","lastTransitionTime":"2026-01-25T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.473128 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d159d4ffde7e84106d538217de84c5ab52f1ca8d4e480e874b535d7f0c5ef3d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d159d4ffde7e84106d538217de84c5ab52f1ca8d4e480e874b535d7f0c5ef3d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:16Z\\\",\\\"message\\\":\\\"operator/iptables-alerter-4ln5h\\\\nI0125 00:07:16.475093 6651 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0125 00:07:16.475049 6651 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-cc28q\\\\nI0125 00:07:16.475031 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0125 00:07:16.475146 6651 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0125 00:07:16.475018 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j after 0 failed attempt(s)\\\\nI0125 00:07:16.475161 6651 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j\\\\nI0125 00:07:16.475146 6651 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-cc28q in node crc\\\\nI0125 00:07:16.475179 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-cc28q after 0 failed attempt(s)\\\\nI0125 00:07:16.475188 6651 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-cc28q\\\\nI0125 00:07:16.474974 6651 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:07:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-cc28q_openshift-ovn-kubernetes(64cc3123-ba76-4365-86ae-c4cf7c09a805)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.482641 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.495617 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-
manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.506886 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"effda373-9234-4f45-83f4-a07522fa05a7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebc8ef5ed458ecf36d2dbca7f31efb3cdd30a8c6f652f2bc4adfe01908061cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66146e1867d01422601c070ea10822d47529df9fba22de06fec14fc0a8124455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://576bfc5316d868ae8a28a9a77121be53f290b6d04dbf72bc22435f12ecd99ae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef1d5d8207817b57bc382cfb3b403f325cc1e158d53b4f0980130e3c04613f25\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef1d5d8207817b57bc382cfb3b403f325cc1e158d53b4f0980130e3c04613f25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:40Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.560454 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.560635 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.560695 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.560754 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.560809 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:40Z","lastTransitionTime":"2026-01-25T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.663502 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.663828 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.663891 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.663955 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.664025 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:40Z","lastTransitionTime":"2026-01-25T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.766089 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.766134 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.766144 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.766158 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.766166 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:40Z","lastTransitionTime":"2026-01-25T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.868815 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.868860 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.868869 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.868882 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.868892 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:40Z","lastTransitionTime":"2026-01-25T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.971552 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.971598 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.971607 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.971622 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:40 crc kubenswrapper[4985]: I0125 00:07:40.971632 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:40Z","lastTransitionTime":"2026-01-25T00:07:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.074045 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.074135 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.074153 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.074177 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.074195 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:41Z","lastTransitionTime":"2026-01-25T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.176618 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.176664 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.176676 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.176693 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.176706 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:41Z","lastTransitionTime":"2026-01-25T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.273697 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.273728 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.273797 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:41 crc kubenswrapper[4985]: E0125 00:07:41.273894 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:41 crc kubenswrapper[4985]: E0125 00:07:41.273967 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:41 crc kubenswrapper[4985]: E0125 00:07:41.274039 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.279249 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.279292 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.279304 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.279320 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.279332 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:41Z","lastTransitionTime":"2026-01-25T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.339995 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 09:29:38.918568142 +0000 UTC Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.380925 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.380969 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.380980 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.380995 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.381005 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:41Z","lastTransitionTime":"2026-01-25T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.483126 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.483161 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.483169 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.483182 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.483191 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:41Z","lastTransitionTime":"2026-01-25T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.585802 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.585838 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.585849 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.585864 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.585874 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:41Z","lastTransitionTime":"2026-01-25T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.688937 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.688999 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.689021 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.689048 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.689071 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:41Z","lastTransitionTime":"2026-01-25T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.791879 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.791922 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.791936 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.791954 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.791963 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:41Z","lastTransitionTime":"2026-01-25T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.893735 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.893768 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.893777 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.893791 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.893800 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:41Z","lastTransitionTime":"2026-01-25T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.996640 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.996684 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.996694 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.996708 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:41 crc kubenswrapper[4985]: I0125 00:07:41.996718 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:41Z","lastTransitionTime":"2026-01-25T00:07:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.099626 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.099736 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.099776 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.099814 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.099837 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:42Z","lastTransitionTime":"2026-01-25T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.202193 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.202260 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.202277 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.202302 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.202319 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:42Z","lastTransitionTime":"2026-01-25T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.274598 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:42 crc kubenswrapper[4985]: E0125 00:07:42.274748 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.305021 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.305063 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.305074 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.305090 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.305122 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:42Z","lastTransitionTime":"2026-01-25T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.341594 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 10:53:01.347988051 +0000 UTC Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.407770 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.407811 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.407823 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.407838 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.407848 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:42Z","lastTransitionTime":"2026-01-25T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.510270 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.510327 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.510344 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.510370 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.510388 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:42Z","lastTransitionTime":"2026-01-25T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.612181 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.612220 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.612231 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.612247 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.612260 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:42Z","lastTransitionTime":"2026-01-25T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.715046 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.715098 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.715129 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.715146 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.715157 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:42Z","lastTransitionTime":"2026-01-25T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.818083 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.818205 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.818230 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.818261 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.818286 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:42Z","lastTransitionTime":"2026-01-25T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.920260 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.920298 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.920310 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.920326 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:42 crc kubenswrapper[4985]: I0125 00:07:42.920337 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:42Z","lastTransitionTime":"2026-01-25T00:07:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.022523 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.022556 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.022567 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.022582 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.022593 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:43Z","lastTransitionTime":"2026-01-25T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.125182 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.125232 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.125244 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.125262 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.125276 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:43Z","lastTransitionTime":"2026-01-25T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.228332 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.228403 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.228421 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.228444 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.228461 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:43Z","lastTransitionTime":"2026-01-25T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.274204 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.274285 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:43 crc kubenswrapper[4985]: E0125 00:07:43.274397 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.274160 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:43 crc kubenswrapper[4985]: E0125 00:07:43.274553 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:43 crc kubenswrapper[4985]: E0125 00:07:43.274750 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.330663 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.330717 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.330735 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.330758 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.330776 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:43Z","lastTransitionTime":"2026-01-25T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.341790 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 15:12:01.177776527 +0000 UTC Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.437281 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.437821 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.437908 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.437977 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.438054 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:43Z","lastTransitionTime":"2026-01-25T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.541341 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.541440 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.541458 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.541483 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.541500 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:43Z","lastTransitionTime":"2026-01-25T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.644805 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.644856 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.644874 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.644895 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.644911 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:43Z","lastTransitionTime":"2026-01-25T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.747014 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.747068 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.747084 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.747138 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.747156 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:43Z","lastTransitionTime":"2026-01-25T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.849167 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.849228 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.849245 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.849269 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.849287 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:43Z","lastTransitionTime":"2026-01-25T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.951226 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.951282 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.951291 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.951306 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:43 crc kubenswrapper[4985]: I0125 00:07:43.951317 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:43Z","lastTransitionTime":"2026-01-25T00:07:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.053714 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.053755 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.053766 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.053780 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.053792 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:44Z","lastTransitionTime":"2026-01-25T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.156962 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.157016 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.157034 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.157056 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.157074 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:44Z","lastTransitionTime":"2026-01-25T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.259376 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.259439 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.259455 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.259481 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.259499 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:44Z","lastTransitionTime":"2026-01-25T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.273649 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:44 crc kubenswrapper[4985]: E0125 00:07:44.273854 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.275417 4985 scope.go:117] "RemoveContainer" containerID="d159d4ffde7e84106d538217de84c5ab52f1ca8d4e480e874b535d7f0c5ef3d4" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.342282 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 07:21:51.624979968 +0000 UTC Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.362569 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.362626 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.362643 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.362666 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.362682 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:44Z","lastTransitionTime":"2026-01-25T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.465913 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.465977 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.465996 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.466021 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.466040 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:44Z","lastTransitionTime":"2026-01-25T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.498383 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.498442 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.498459 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.498483 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.498501 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:44Z","lastTransitionTime":"2026-01-25T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:44 crc kubenswrapper[4985]: E0125 00:07:44.516806 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:44Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.522180 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.522239 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.522262 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.522293 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.522315 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:44Z","lastTransitionTime":"2026-01-25T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:44 crc kubenswrapper[4985]: E0125 00:07:44.543071 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:44Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.548557 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.548614 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.548632 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.548655 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.548673 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:44Z","lastTransitionTime":"2026-01-25T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:44 crc kubenswrapper[4985]: E0125 00:07:44.571455 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:44Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.576946 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.577016 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.577040 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.578313 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.578348 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:44Z","lastTransitionTime":"2026-01-25T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:44 crc kubenswrapper[4985]: E0125 00:07:44.603821 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:44Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.608862 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.608906 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.608919 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.608938 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.608954 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:44Z","lastTransitionTime":"2026-01-25T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:44 crc kubenswrapper[4985]: E0125 00:07:44.628365 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:44Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:44Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:44 crc kubenswrapper[4985]: E0125 00:07:44.628501 4985 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.630178 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.630224 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.630236 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.630254 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.630269 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:44Z","lastTransitionTime":"2026-01-25T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.733385 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.733453 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.733466 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.733488 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.733503 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:44Z","lastTransitionTime":"2026-01-25T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.781089 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-cc28q_64cc3123-ba76-4365-86ae-c4cf7c09a805/ovnkube-controller/2.log" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.783304 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerStarted","Data":"cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c"} Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.783707 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.801562 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\
",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:44Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.818396 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.
11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:44Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.833137 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:44Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.836359 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.836423 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.836442 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.836468 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.836484 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:44Z","lastTransitionTime":"2026-01-25T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.848334 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:44Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.862977 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37f704152429d54471e85318a5e83ab5aa842441c44fc4e6615bb50d8fe2b03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:36Z\\\",\\\"message\\\":\\\"2026-01-25T00:06:51+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_6db04f53-5cb0-4660-8c3e-d70eb8d04115\\\\n2026-01-25T00:06:51+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6db04f53-5cb0-4660-8c3e-d70eb8d04115 to /host/opt/cni/bin/\\\\n2026-01-25T00:06:51Z [verbose] multus-daemon started\\\\n2026-01-25T00:06:51Z [verbose] Readiness Indicator file check\\\\n2026-01-25T00:07:36Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:44Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.893962 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:44Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.914033 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqtvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39723ce0-614f-4ada-9cc7-6efe79c7e51c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqtvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:44Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.928579 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:44Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.938717 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.938753 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.938765 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.938781 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.938792 4985 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:44Z","lastTransitionTime":"2026-01-25T00:07:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.943381 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:44Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.951763 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:44Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.962827 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:44Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:44 crc kubenswrapper[4985]: I0125 00:07:44.989045 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb951b5e71384071465a2286ef97dae8bff0aacc
637cb2b8818787033722b80c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d159d4ffde7e84106d538217de84c5ab52f1ca8d4e480e874b535d7f0c5ef3d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:16Z\\\",\\\"message\\\":\\\"operator/iptables-alerter-4ln5h\\\\nI0125 00:07:16.475093 6651 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0125 00:07:16.475049 6651 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-cc28q\\\\nI0125 00:07:16.475031 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0125 00:07:16.475146 6651 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0125 00:07:16.475018 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j after 0 failed attempt(s)\\\\nI0125 00:07:16.475161 6651 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j\\\\nI0125 00:07:16.475146 6651 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-cc28q in node crc\\\\nI0125 00:07:16.475179 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-cc28q after 0 failed attempt(s)\\\\nI0125 00:07:16.475188 6651 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-cc28q\\\\nI0125 00:07:16.474974 6651 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:07:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:44Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.000813 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:44Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.015297 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:45Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.028315 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"effda373-9234-4f45-83f4-a07522fa05a7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebc8ef5ed458ecf36d2dbca7f31efb3cdd30a8c6f652f2bc4adfe01908061cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66146e1867d01422601c070ea10822d47529df9fba22de06fec14fc0a8124455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://576bfc5316d868ae8a28a9a77121be53f290b6d04dbf72bc22435f12ecd99ae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef1d5d8207817b57bc382cfb3b403f325cc1e158d53b4f0980130e3c04613f25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef1d5d8207817b57bc382cfb3b403f325cc1e158d53b4f0980130e3c04613f25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:45Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.041522 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.041562 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.041574 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.041592 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 
00:07:45.041604 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:45Z","lastTransitionTime":"2026-01-25T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.042895 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:45Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.055909 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fa8dd6d7ac6ffa3214a1a6a4539d230445f1ffbf1989f0fb77be31eacde7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769742294a1ffd4551af4b1045e310ed6ab47e9254c01b9b251d6cafe0b48a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\
\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l284j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:45Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.160094 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.160158 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.160171 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.160187 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.160196 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:45Z","lastTransitionTime":"2026-01-25T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.263451 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.263521 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.263539 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.263568 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.263587 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:45Z","lastTransitionTime":"2026-01-25T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.274191 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.274219 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:45 crc kubenswrapper[4985]: E0125 00:07:45.274440 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.274224 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:45 crc kubenswrapper[4985]: E0125 00:07:45.274609 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:45 crc kubenswrapper[4985]: E0125 00:07:45.274750 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.342471 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 05:47:01.694190443 +0000 UTC Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.367138 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.367180 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.367193 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.367212 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.367227 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:45Z","lastTransitionTime":"2026-01-25T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.470414 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.470486 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.470510 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.470545 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.470573 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:45Z","lastTransitionTime":"2026-01-25T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.573073 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.573115 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.573124 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.573136 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.573151 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:45Z","lastTransitionTime":"2026-01-25T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.675057 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.675099 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.675131 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.675147 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.675161 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:45Z","lastTransitionTime":"2026-01-25T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.779075 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.779152 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.779169 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.779191 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.779208 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:45Z","lastTransitionTime":"2026-01-25T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.882586 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.882648 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.882666 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.882693 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.882713 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:45Z","lastTransitionTime":"2026-01-25T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.986383 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.986441 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.986456 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.986481 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:45 crc kubenswrapper[4985]: I0125 00:07:45.986496 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:45Z","lastTransitionTime":"2026-01-25T00:07:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.089490 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.089544 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.089560 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.089585 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.089603 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:46Z","lastTransitionTime":"2026-01-25T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.192152 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.192214 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.192230 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.192252 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.192269 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:46Z","lastTransitionTime":"2026-01-25T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.274222 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:46 crc kubenswrapper[4985]: E0125 00:07:46.274402 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.295329 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.295405 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.295427 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.295455 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.295480 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:46Z","lastTransitionTime":"2026-01-25T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.342911 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 15:24:03.796597141 +0000 UTC Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.399258 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.399336 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.399358 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.399390 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.399414 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:46Z","lastTransitionTime":"2026-01-25T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.502184 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.502237 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.502254 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.502281 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.502298 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:46Z","lastTransitionTime":"2026-01-25T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.605457 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.605524 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.605538 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.605553 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.605564 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:46Z","lastTransitionTime":"2026-01-25T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.708272 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.708331 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.708349 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.708373 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.708394 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:46Z","lastTransitionTime":"2026-01-25T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.794070 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-cc28q_64cc3123-ba76-4365-86ae-c4cf7c09a805/ovnkube-controller/3.log" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.795188 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-cc28q_64cc3123-ba76-4365-86ae-c4cf7c09a805/ovnkube-controller/2.log" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.799374 4985 generic.go:334] "Generic (PLEG): container finished" podID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerID="cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c" exitCode=1 Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.799425 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerDied","Data":"cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c"} Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.799475 4985 scope.go:117] "RemoveContainer" containerID="d159d4ffde7e84106d538217de84c5ab52f1ca8d4e480e874b535d7f0c5ef3d4" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.801518 4985 scope.go:117] "RemoveContainer" containerID="cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c" Jan 25 00:07:46 crc kubenswrapper[4985]: E0125 00:07:46.802879 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-cc28q_openshift-ovn-kubernetes(64cc3123-ba76-4365-86ae-c4cf7c09a805)\"" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.815234 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.815287 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.815307 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.815330 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.815347 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:46Z","lastTransitionTime":"2026-01-25T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.822851 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.844683 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.862400 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.878947 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.897020 4985 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.910649 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.917769 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.917821 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.917840 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.917864 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.917881 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:46Z","lastTransitionTime":"2026-01-25T00:07:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.931352 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37f704152429d54471e85318a5e83ab5aa842441c44fc4e6615bb50d8fe2b03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:36Z\\\",\\\"message\\\":\\\"2026-01-25T00:06:51+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_6db04f53-5cb0-4660-8c3e-d70eb8d04115\\\\n2026-01-25T00:06:51+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6db04f53-5cb0-4660-8c3e-d70eb8d04115 to /host/opt/cni/bin/\\\\n2026-01-25T00:06:51Z [verbose] multus-daemon started\\\\n2026-01-25T00:06:51Z [verbose] Readiness Indicator file check\\\\n2026-01-25T00:07:36Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.955748 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.970066 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqtvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39723ce0-614f-4ada-9cc7-6efe79c7e51c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqtvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:46 crc kubenswrapper[4985]: I0125 00:07:46.987009 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:46Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.004772 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"effda373-9234-4f45-83f4-a07522fa05a7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebc8ef5ed458ecf36d2dbca7f31efb3cdd30a8c6f652f2bc4adfe01908061cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66146e1867d01422601c070ea10822d47529df9fba22de06fec14fc0a8124455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://576bfc5316d868ae8a28a9a77121be53f290b6d04dbf72bc22435f12ecd99ae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef1d5d8207817b57bc382cfb3b403f325cc1e158d53b4f0980130e3c04613f25\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef1d5d8207817b57bc382cfb3b403f325cc1e158d53b4f0980130e3c04613f25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:47Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.020734 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.020792 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.020806 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.020831 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.020847 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:47Z","lastTransitionTime":"2026-01-25T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.024352 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:47Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.043802 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:47Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.074460 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb951b5e71384071465a2286ef97dae8bff0aacc
637cb2b8818787033722b80c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d159d4ffde7e84106d538217de84c5ab52f1ca8d4e480e874b535d7f0c5ef3d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:16Z\\\",\\\"message\\\":\\\"operator/iptables-alerter-4ln5h\\\\nI0125 00:07:16.475093 6651 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0125 00:07:16.475049 6651 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-cc28q\\\\nI0125 00:07:16.475031 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0125 00:07:16.475146 6651 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0125 00:07:16.475018 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j after 0 failed attempt(s)\\\\nI0125 00:07:16.475161 6651 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j\\\\nI0125 00:07:16.475146 6651 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-cc28q in node crc\\\\nI0125 00:07:16.475179 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-cc28q after 0 failed attempt(s)\\\\nI0125 00:07:16.475188 6651 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-cc28q\\\\nI0125 00:07:16.474974 6651 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:07:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:45Z\\\",\\\"message\\\":\\\" (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0125 00:07:45.643698 7045 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0125 00:07:45.644281 7045 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0125 00:07:45.644502 7045 egressservice_zone_endpointslice.go:80] Ignoring updating openshift-machine-api/cluster-autoscaler-operator for endpointslice openshift-machine-api/cluster-autoscaler-operator-hm4j7 as it is not a known egress service\\\\nI0125 00:07:45.644507 7045 master_controller.go:87] Starting Admin Policy Based Route Controller\\\\nI0125 00:07:45.644526 7045 external_controller.go:276] Starting Admin Policy Based Route Controller\\\\nI0125 00:07:45.644546 7045 egressservice_zone_node.go:110] Processing sync for Egress Service node crc\\\\nI0125 00:07:45.644557 7045 ovnkube.go:599] Stopped ovnkube\\\\nI0125 00:07:45.644565 7045 egressservice_zone_node.go:113] Finished syncing 
Egress Service node crc: 21.471µs\\\\nI0125 00:07:45.644586 7045 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0125 00:07:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o:
//7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:47Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.093090 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:47Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.115850 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:47Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.123450 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.123514 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.123532 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.123560 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.123578 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:47Z","lastTransitionTime":"2026-01-25T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.134378 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fa8dd6d7ac6ffa3214a1a6a4539d230445f1ffbf1989f0fb77be31eacde7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769742294a1ffd4551af4b1045e310ed6ab47e9254c01b9b251d6cafe0b48a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l284j\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:47Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.226256 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.226324 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.226338 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.226355 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.226368 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:47Z","lastTransitionTime":"2026-01-25T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.274140 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.274167 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.274162 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:47 crc kubenswrapper[4985]: E0125 00:07:47.274263 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:47 crc kubenswrapper[4985]: E0125 00:07:47.274472 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:47 crc kubenswrapper[4985]: E0125 00:07:47.274564 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.328870 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.328932 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.328950 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.328974 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.328991 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:47Z","lastTransitionTime":"2026-01-25T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.343337 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 12:55:36.681254547 +0000 UTC Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.432914 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.432967 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.432983 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.433008 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.433024 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:47Z","lastTransitionTime":"2026-01-25T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.536527 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.536571 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.536580 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.536595 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.536604 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:47Z","lastTransitionTime":"2026-01-25T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.640195 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.640251 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.640269 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.640291 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.640309 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:47Z","lastTransitionTime":"2026-01-25T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.743840 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.743915 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.743936 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.743964 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.743988 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:47Z","lastTransitionTime":"2026-01-25T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.804631 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-cc28q_64cc3123-ba76-4365-86ae-c4cf7c09a805/ovnkube-controller/3.log" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.847247 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.847294 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.847305 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.847319 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.847330 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:47Z","lastTransitionTime":"2026-01-25T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.950170 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.950234 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.950255 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.950288 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:47 crc kubenswrapper[4985]: I0125 00:07:47.950310 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:47Z","lastTransitionTime":"2026-01-25T00:07:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.053587 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.053641 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.053659 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.053682 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.053698 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:48Z","lastTransitionTime":"2026-01-25T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.157168 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.157252 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.157276 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.157309 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.157333 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:48Z","lastTransitionTime":"2026-01-25T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.261505 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.261574 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.261593 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.261620 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.261639 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:48Z","lastTransitionTime":"2026-01-25T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.273993 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:48 crc kubenswrapper[4985]: E0125 00:07:48.274265 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.343502 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 11:20:15.936751586 +0000 UTC Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.365060 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.365190 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.365216 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.365250 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.365275 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:48Z","lastTransitionTime":"2026-01-25T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.468208 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.468280 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.468299 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.468322 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.468340 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:48Z","lastTransitionTime":"2026-01-25T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.571736 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.571809 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.571833 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.571866 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.571891 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:48Z","lastTransitionTime":"2026-01-25T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.674996 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.675061 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.675079 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.675136 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.675155 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:48Z","lastTransitionTime":"2026-01-25T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.778557 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.778614 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.778631 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.778655 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.778672 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:48Z","lastTransitionTime":"2026-01-25T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.880939 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.881009 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.881034 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.881062 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.881084 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:48Z","lastTransitionTime":"2026-01-25T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.984896 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.985136 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.985186 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.985219 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:48 crc kubenswrapper[4985]: I0125 00:07:48.985241 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:48Z","lastTransitionTime":"2026-01-25T00:07:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.089066 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.089148 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.089160 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.089176 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.089190 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:49Z","lastTransitionTime":"2026-01-25T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.192619 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.192688 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.192706 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.192735 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.192754 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:49Z","lastTransitionTime":"2026-01-25T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.274340 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:49 crc kubenswrapper[4985]: E0125 00:07:49.274481 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.274571 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.274590 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:49 crc kubenswrapper[4985]: E0125 00:07:49.274942 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:49 crc kubenswrapper[4985]: E0125 00:07:49.275217 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.294909 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.297625 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.297668 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.297681 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.297696 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.297708 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:49Z","lastTransitionTime":"2026-01-25T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.344298 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 10:30:57.582665913 +0000 UTC Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.401098 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.401184 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.401202 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.401226 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.401243 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:49Z","lastTransitionTime":"2026-01-25T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.504995 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.505061 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.505099 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.505167 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.505190 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:49Z","lastTransitionTime":"2026-01-25T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.607913 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.607985 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.608008 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.608038 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.608059 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:49Z","lastTransitionTime":"2026-01-25T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.709908 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.709945 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.709957 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.709973 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.709983 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:49Z","lastTransitionTime":"2026-01-25T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.813166 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.813222 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.813238 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.813261 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.813282 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:49Z","lastTransitionTime":"2026-01-25T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.916844 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.916891 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.916908 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.916931 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:49 crc kubenswrapper[4985]: I0125 00:07:49.916948 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:49Z","lastTransitionTime":"2026-01-25T00:07:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.019491 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.019543 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.019562 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.019581 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.019595 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:50Z","lastTransitionTime":"2026-01-25T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.121910 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.121961 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.121972 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.121989 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.122001 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:50Z","lastTransitionTime":"2026-01-25T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.224471 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.224538 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.224556 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.224587 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.224605 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:50Z","lastTransitionTime":"2026-01-25T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.274162 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:50 crc kubenswrapper[4985]: E0125 00:07:50.274321 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.298372 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.313376 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.327412 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.327455 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.327469 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.327488 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.327504 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:50Z","lastTransitionTime":"2026-01-25T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.330782 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.344754 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 
07:22:26.474049079 +0000 UTC Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.344907 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.360533 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqtvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39723ce0-614f-4ada-9cc7-6efe79c7e51c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqtvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.385541 4985 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\
\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.400255 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.411474 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.431463 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.431505 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.431516 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.431534 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.431550 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:50Z","lastTransitionTime":"2026-01-25T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.461757 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37f704152429d54471e85318a5e83ab5aa842441c44fc4e6615bb50d8fe2b03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:36Z\\\",\\\"message\\\":\\\"2026-01-25T00:06:51+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_6db04f53-5cb0-4660-8c3e-d70eb8d04115\\\\n2026-01-25T00:06:51+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6db04f53-5cb0-4660-8c3e-d70eb8d04115 to /host/opt/cni/bin/\\\\n2026-01-25T00:06:51Z [verbose] multus-daemon started\\\\n2026-01-25T00:06:51Z [verbose] Readiness Indicator file check\\\\n2026-01-25T00:07:36Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.496093 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.506145 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.518640 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.528590 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"effda373-9234-4f45-83f4-a07522fa05a7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebc8ef5ed458ecf36d2dbca7f31efb3cdd30a8c6f652f2bc4adfe01908061cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66146e1867d01422601c070ea10822d47529df9fba22de06fec14fc0a8124455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://576bfc5316d868ae8a28a9a77121be53f290b6d04dbf72bc22435f12ecd99ae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef1d5d8207817b57bc382cfb3b403f325cc1e158d53b4f0980130e3c04613f25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef1d5d8207817b57bc382cfb3b403f325cc1e158d53b4f0980130e3c04613f25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.534084 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.534131 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.534142 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.534159 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 
00:07:50.534171 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:50Z","lastTransitionTime":"2026-01-25T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.539883 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.550906 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.580174 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb951b5e71384071465a2286ef97dae8bff0aacc
637cb2b8818787033722b80c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d159d4ffde7e84106d538217de84c5ab52f1ca8d4e480e874b535d7f0c5ef3d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:16Z\\\",\\\"message\\\":\\\"operator/iptables-alerter-4ln5h\\\\nI0125 00:07:16.475093 6651 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0125 00:07:16.475049 6651 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-cc28q\\\\nI0125 00:07:16.475031 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0125 00:07:16.475146 6651 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0125 00:07:16.475018 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j after 0 failed attempt(s)\\\\nI0125 00:07:16.475161 6651 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j\\\\nI0125 00:07:16.475146 6651 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-cc28q in node crc\\\\nI0125 00:07:16.475179 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-cc28q after 0 failed attempt(s)\\\\nI0125 00:07:16.475188 6651 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-cc28q\\\\nI0125 00:07:16.474974 6651 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:07:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:45Z\\\",\\\"message\\\":\\\" (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0125 00:07:45.643698 7045 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0125 00:07:45.644281 7045 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0125 00:07:45.644502 7045 egressservice_zone_endpointslice.go:80] Ignoring updating openshift-machine-api/cluster-autoscaler-operator for endpointslice openshift-machine-api/cluster-autoscaler-operator-hm4j7 as it is not a known egress service\\\\nI0125 00:07:45.644507 7045 master_controller.go:87] Starting Admin Policy Based Route Controller\\\\nI0125 00:07:45.644526 7045 external_controller.go:276] Starting Admin Policy Based Route Controller\\\\nI0125 00:07:45.644546 7045 egressservice_zone_node.go:110] Processing sync for Egress Service node crc\\\\nI0125 00:07:45.644557 7045 ovnkube.go:599] Stopped ovnkube\\\\nI0125 00:07:45.644565 7045 egressservice_zone_node.go:113] Finished syncing 
Egress Service node crc: 21.471µs\\\\nI0125 00:07:45.644586 7045 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0125 00:07:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o:
//7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.612569 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98b4017b-02b3-4c2e-9d61-ebc765d69aa2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d16406949c0e3b968f27eb5903d09f4f6f00a048431e549dad52ed6ca874db3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\
":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd19b3d06299db37381da7ccb0a0c18b7218ac4b11086a71ff99f0b66fe8bd32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0f5e5dd040b55d2048fcf8872f95b7f59b9d98b1f57b8db9279566a0fd7c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c0bd735ebf4924b28a8f190b638051fbfa6d711826cedaf478a0bb0350daed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b575ed7dde44ce49b3579b9d416b60d59d03bc782f4b098759ee2ca4ad97a347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"
containerID\\\":\\\"cri-o://10d1c5e6549442b3160096ae3ad896fa230f6681874846cac6cd4530aa391170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d1c5e6549442b3160096ae3ad896fa230f6681874846cac6cd4530aa391170\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f247468f96053efabec80a3943301b824371b4db529e638153ea78de3a55c9b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f247468f96053efabec80a3943301b824371b4db529e638153ea78de3a55c9b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1b67e2fa943c3db959df46bfa879d91fc3fdcb46d14dc3f8ddebdda5d5f3b0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b67e2fa943c3db959df46bfa879d91fc3fdcb46d14dc3f8ddebdda5d5f3b0ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.625134 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fa8dd6d7ac6ffa3214a1a6a4539d230445f1ffbf1989f0fb77be31eacde7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769742294a1ffd4551af4b1045e310ed6ab47e9254c01b9b251d6cafe0b48a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l284j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:50Z is after 2025-08-24T17:21:41Z" Jan 25 
00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.637490 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.637519 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.637554 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.637572 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.637583 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:50Z","lastTransitionTime":"2026-01-25T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.741937 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.742007 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.742030 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.742060 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.742087 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:50Z","lastTransitionTime":"2026-01-25T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.844869 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.844934 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.844957 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.844985 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.845008 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:50Z","lastTransitionTime":"2026-01-25T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.948800 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.948854 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.948872 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.948896 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:50 crc kubenswrapper[4985]: I0125 00:07:50.948914 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:50Z","lastTransitionTime":"2026-01-25T00:07:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.051971 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.052072 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.052098 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.052152 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.052229 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:51Z","lastTransitionTime":"2026-01-25T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.155755 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.155831 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.155850 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.155880 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.155902 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:51Z","lastTransitionTime":"2026-01-25T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.259087 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.259198 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.259216 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.259241 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.259264 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:51Z","lastTransitionTime":"2026-01-25T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.274658 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.274714 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.274712 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:51 crc kubenswrapper[4985]: E0125 00:07:51.274899 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:51 crc kubenswrapper[4985]: E0125 00:07:51.275036 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:51 crc kubenswrapper[4985]: E0125 00:07:51.275286 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.345417 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 09:19:30.151968188 +0000 UTC Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.362154 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.362225 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.362245 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.362274 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.362290 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:51Z","lastTransitionTime":"2026-01-25T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.470432 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.470544 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.470570 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.470603 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.470638 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:51Z","lastTransitionTime":"2026-01-25T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.573546 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.573642 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.573662 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.573683 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.573741 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:51Z","lastTransitionTime":"2026-01-25T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.676732 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.676783 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.676802 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.676826 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.676843 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:51Z","lastTransitionTime":"2026-01-25T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.780134 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.780206 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.780224 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.780247 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.780265 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:51Z","lastTransitionTime":"2026-01-25T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.892782 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.892842 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.892862 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.892887 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.892907 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:51Z","lastTransitionTime":"2026-01-25T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.996350 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.996441 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.996458 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.996481 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:51 crc kubenswrapper[4985]: I0125 00:07:51.996498 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:51Z","lastTransitionTime":"2026-01-25T00:07:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.099593 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.099659 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.099683 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.099709 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.099731 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:52Z","lastTransitionTime":"2026-01-25T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.202901 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.202962 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.202979 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.203003 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.203023 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:52Z","lastTransitionTime":"2026-01-25T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.273699 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:52 crc kubenswrapper[4985]: E0125 00:07:52.274099 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.305590 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.305632 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.305644 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.305660 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.305670 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:52Z","lastTransitionTime":"2026-01-25T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.346170 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 02:49:34.275996382 +0000 UTC Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.408028 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.408391 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.408430 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.408477 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.408509 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:52Z","lastTransitionTime":"2026-01-25T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.511972 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.512057 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.512079 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.512168 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.512199 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:52Z","lastTransitionTime":"2026-01-25T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.615630 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.615693 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.615710 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.615734 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.615751 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:52Z","lastTransitionTime":"2026-01-25T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.718699 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.718773 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.718790 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.718816 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.718836 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:52Z","lastTransitionTime":"2026-01-25T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.821946 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.821987 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.822008 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.822032 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.822049 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:52Z","lastTransitionTime":"2026-01-25T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.925709 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.925769 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.925809 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.925833 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:52 crc kubenswrapper[4985]: I0125 00:07:52.925850 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:52Z","lastTransitionTime":"2026-01-25T00:07:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.028777 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.028842 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.028865 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.028892 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.028913 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:53Z","lastTransitionTime":"2026-01-25T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.132680 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.132729 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.132746 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.132769 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.132787 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:53Z","lastTransitionTime":"2026-01-25T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.235986 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.236435 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.236457 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.236480 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.236496 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:53Z","lastTransitionTime":"2026-01-25T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.274161 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.274362 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:53 crc kubenswrapper[4985]: E0125 00:07:53.274498 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:53 crc kubenswrapper[4985]: E0125 00:07:53.274546 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.274601 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:53 crc kubenswrapper[4985]: E0125 00:07:53.274703 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.339079 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.339167 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.339193 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.339221 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.339244 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:53Z","lastTransitionTime":"2026-01-25T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.345900 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.346026 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.346101 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:53 crc kubenswrapper[4985]: E0125 00:07:53.346187 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:57.346152324 +0000 UTC m=+147.378088637 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:07:53 crc kubenswrapper[4985]: E0125 00:07:53.346234 4985 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 25 00:07:53 crc kubenswrapper[4985]: E0125 00:07:53.346314 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-25 00:08:57.346293448 +0000 UTC m=+147.378229761 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 25 00:07:53 crc kubenswrapper[4985]: E0125 00:07:53.346319 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 25 00:07:53 crc kubenswrapper[4985]: E0125 00:07:53.346357 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 25 00:07:53 crc kubenswrapper[4985]: E0125 00:07:53.346381 4985 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.346352 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 04:27:26.483612318 +0000 UTC Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.346456 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:53 crc kubenswrapper[4985]: E0125 00:07:53.346541 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 25 00:07:53 crc kubenswrapper[4985]: E0125 00:07:53.346571 4985 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 25 00:07:53 crc kubenswrapper[4985]: E0125 00:07:53.346573 4985 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-25 00:08:57.346550925 +0000 UTC m=+147.378487228 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:07:53 crc kubenswrapper[4985]: E0125 00:07:53.346592 4985 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:07:53 crc kubenswrapper[4985]: E0125 00:07:53.346654 4985 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.346544 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:53 crc kubenswrapper[4985]: E0125 00:07:53.346808 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-25 00:08:57.346681018 +0000 UTC m=+147.378617321 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 25 00:07:53 crc kubenswrapper[4985]: E0125 00:07:53.346831 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-25 00:08:57.346821782 +0000 UTC m=+147.378758095 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.442951 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.443005 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.443025 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.443047 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.443064 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:53Z","lastTransitionTime":"2026-01-25T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.546391 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.546456 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.546478 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.546504 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.546525 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:53Z","lastTransitionTime":"2026-01-25T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.649209 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.649251 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.649264 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.649280 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.649292 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:53Z","lastTransitionTime":"2026-01-25T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.753633 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.753674 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.753684 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.753700 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.753712 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:53Z","lastTransitionTime":"2026-01-25T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.856689 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.856811 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.856883 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.856914 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.856937 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:53Z","lastTransitionTime":"2026-01-25T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.959270 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.959313 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.959325 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.959340 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:53 crc kubenswrapper[4985]: I0125 00:07:53.959352 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:53Z","lastTransitionTime":"2026-01-25T00:07:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.061237 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.061283 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.061294 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.061311 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.061321 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:54Z","lastTransitionTime":"2026-01-25T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.164532 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.164612 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.164665 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.164698 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.164720 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:54Z","lastTransitionTime":"2026-01-25T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.268140 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.268194 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.268211 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.268236 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.268254 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:54Z","lastTransitionTime":"2026-01-25T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.274039 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:54 crc kubenswrapper[4985]: E0125 00:07:54.274300 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.347372 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 21:16:53.519334109 +0000 UTC Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.370934 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.371004 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.371029 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.371059 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.371083 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:54Z","lastTransitionTime":"2026-01-25T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.771341 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.771401 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.771420 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.771443 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.771461 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:54Z","lastTransitionTime":"2026-01-25T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:54 crc kubenswrapper[4985]: E0125 00:07:54.793266 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:54Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.798850 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.798903 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.853811 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.853833 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.853850 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:54Z","lastTransitionTime":"2026-01-25T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:54 crc kubenswrapper[4985]: E0125 00:07:54.875957 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:54Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.882622 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.882668 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.882684 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.882706 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.882723 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:54Z","lastTransitionTime":"2026-01-25T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:54 crc kubenswrapper[4985]: E0125 00:07:54.903270 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:54Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:54Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:07:54Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:54Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:07:54Z is after 2025-08-24T17:21:41Z" Jan 25 00:07:54 crc kubenswrapper[4985]: E0125 00:07:54.903798 4985 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.905963 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.906188 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.906363 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.906531 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:54 crc kubenswrapper[4985]: I0125 00:07:54.906677 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:54Z","lastTransitionTime":"2026-01-25T00:07:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.010301 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.010365 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.010384 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.010411 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.010430 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:55Z","lastTransitionTime":"2026-01-25T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.113521 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.113579 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.113599 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.113625 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.113642 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:55Z","lastTransitionTime":"2026-01-25T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.217035 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.217382 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.217600 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.217970 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.218329 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:55Z","lastTransitionTime":"2026-01-25T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.273773 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:55 crc kubenswrapper[4985]: E0125 00:07:55.273927 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.274185 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:55 crc kubenswrapper[4985]: E0125 00:07:55.274275 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.274516 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:55 crc kubenswrapper[4985]: E0125 00:07:55.274831 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.320982 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.321374 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.321629 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.322159 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.322501 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:55Z","lastTransitionTime":"2026-01-25T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.348424 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 18:39:00.264386408 +0000 UTC Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.425929 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.426010 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.426036 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.426064 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.426098 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:55Z","lastTransitionTime":"2026-01-25T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.529080 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.529501 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.529706 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.529905 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.530084 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:55Z","lastTransitionTime":"2026-01-25T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.633071 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.633207 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.633236 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.633266 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.633291 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:55Z","lastTransitionTime":"2026-01-25T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.736775 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.736923 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.736940 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.736963 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.736980 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:55Z","lastTransitionTime":"2026-01-25T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.839509 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.839586 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.839611 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.839638 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.839656 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:55Z","lastTransitionTime":"2026-01-25T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.947641 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.948006 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.948179 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.948333 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:55 crc kubenswrapper[4985]: I0125 00:07:55.948589 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:55Z","lastTransitionTime":"2026-01-25T00:07:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.051753 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.052248 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.052378 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.052481 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.052607 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:56Z","lastTransitionTime":"2026-01-25T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.156907 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.157312 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.157638 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.157972 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.158317 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:56Z","lastTransitionTime":"2026-01-25T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.260894 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.261125 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.261453 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.261740 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.261854 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:56Z","lastTransitionTime":"2026-01-25T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.273684 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:56 crc kubenswrapper[4985]: E0125 00:07:56.274242 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.727751 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 14:45:42.679786173 +0000 UTC Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.729362 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.729396 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.729412 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.729433 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.729451 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:56Z","lastTransitionTime":"2026-01-25T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.831964 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.832576 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.832731 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.832883 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.833027 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:56Z","lastTransitionTime":"2026-01-25T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.936153 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.936211 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.936229 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.936253 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:56 crc kubenswrapper[4985]: I0125 00:07:56.936271 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:56Z","lastTransitionTime":"2026-01-25T00:07:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.039254 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.039353 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.039373 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.039398 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.039416 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:57Z","lastTransitionTime":"2026-01-25T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.142768 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.142830 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.142859 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.142883 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.142900 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:57Z","lastTransitionTime":"2026-01-25T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.246146 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.246203 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.246220 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.246242 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.246261 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:57Z","lastTransitionTime":"2026-01-25T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.274152 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.274272 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:57 crc kubenswrapper[4985]: E0125 00:07:57.274371 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.274176 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:57 crc kubenswrapper[4985]: E0125 00:07:57.274706 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:57 crc kubenswrapper[4985]: E0125 00:07:57.274604 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.349388 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.349452 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.349474 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.349502 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.349520 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:57Z","lastTransitionTime":"2026-01-25T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.452601 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.452660 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.452677 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.452749 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.452768 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:57Z","lastTransitionTime":"2026-01-25T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.555529 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.555573 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.555589 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.555611 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.555628 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:57Z","lastTransitionTime":"2026-01-25T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.658052 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.658155 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.658180 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.658205 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.658222 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:57Z","lastTransitionTime":"2026-01-25T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.727938 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 14:27:38.395274681 +0000 UTC Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.761488 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.761543 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.761560 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.761583 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.761603 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:57Z","lastTransitionTime":"2026-01-25T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.864716 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.864781 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.864798 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.864821 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.864838 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:57Z","lastTransitionTime":"2026-01-25T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.968061 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.968247 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.968431 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.968460 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:57 crc kubenswrapper[4985]: I0125 00:07:57.968484 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:57Z","lastTransitionTime":"2026-01-25T00:07:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.073068 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.073214 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.073236 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.073261 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.073278 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:58Z","lastTransitionTime":"2026-01-25T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.177438 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.177497 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.177509 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.177527 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.177540 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:58Z","lastTransitionTime":"2026-01-25T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.274160 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:07:58 crc kubenswrapper[4985]: E0125 00:07:58.274430 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.281207 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.281306 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.281324 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.281348 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.281366 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:58Z","lastTransitionTime":"2026-01-25T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.385564 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.385613 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.385625 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.385645 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.385658 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:58Z","lastTransitionTime":"2026-01-25T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.488631 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.488686 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.488706 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.488734 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.488757 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:58Z","lastTransitionTime":"2026-01-25T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.591308 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.591374 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.591390 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.591416 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.591437 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:58Z","lastTransitionTime":"2026-01-25T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.694811 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.694849 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.694858 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.694872 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.694882 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:58Z","lastTransitionTime":"2026-01-25T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.728341 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 12:06:46.78197166 +0000 UTC Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.798987 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.799171 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.799207 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.799289 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.799352 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:58Z","lastTransitionTime":"2026-01-25T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.902587 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.902657 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.902683 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.902717 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:58 crc kubenswrapper[4985]: I0125 00:07:58.902740 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:58Z","lastTransitionTime":"2026-01-25T00:07:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.005842 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.005963 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.005993 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.006078 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.006102 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:59Z","lastTransitionTime":"2026-01-25T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.108548 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.108615 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.108649 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.108666 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.108680 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:59Z","lastTransitionTime":"2026-01-25T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.210979 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.211026 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.211041 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.211057 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.211069 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:59Z","lastTransitionTime":"2026-01-25T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.273797 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.273814 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.273819 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:07:59 crc kubenswrapper[4985]: E0125 00:07:59.274217 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:07:59 crc kubenswrapper[4985]: E0125 00:07:59.274008 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:07:59 crc kubenswrapper[4985]: E0125 00:07:59.274404 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.314527 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.314599 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.314623 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.314655 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.314678 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:59Z","lastTransitionTime":"2026-01-25T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.417532 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.417589 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.417609 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.417636 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.417654 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:59Z","lastTransitionTime":"2026-01-25T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.519878 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.519928 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.519940 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.519960 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.519972 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:59Z","lastTransitionTime":"2026-01-25T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.623323 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.623395 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.623422 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.623456 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.623480 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:59Z","lastTransitionTime":"2026-01-25T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.726334 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.726412 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.726436 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.726465 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.726492 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:59Z","lastTransitionTime":"2026-01-25T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.728551 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 16:58:48.101906102 +0000 UTC Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.829456 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.829519 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.829543 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.829593 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.829615 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:59Z","lastTransitionTime":"2026-01-25T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.933034 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.933132 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.933150 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.933175 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:07:59 crc kubenswrapper[4985]: I0125 00:07:59.933193 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:07:59Z","lastTransitionTime":"2026-01-25T00:07:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.035462 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.035568 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.035592 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.035618 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.035636 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:00Z","lastTransitionTime":"2026-01-25T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.137622 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.137674 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.137689 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.137709 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.137780 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:00Z","lastTransitionTime":"2026-01-25T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.241229 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.241299 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.241323 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.241352 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.241375 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:00Z","lastTransitionTime":"2026-01-25T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.274556 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:08:00 crc kubenswrapper[4985]: E0125 00:08:00.274787 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.286918 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.300212 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb951b5e71384071465a2286ef97dae8bff0aacc
637cb2b8818787033722b80c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d159d4ffde7e84106d538217de84c5ab52f1ca8d4e480e874b535d7f0c5ef3d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:16Z\\\",\\\"message\\\":\\\"operator/iptables-alerter-4ln5h\\\\nI0125 00:07:16.475093 6651 default_network_controller.go:776] Recording success event on pod openshift-network-operator/network-operator-58b4c7f79c-55gtf\\\\nI0125 00:07:16.475049 6651 obj_retry.go:365] Adding new object: *v1.Pod openshift-ovn-kubernetes/ovnkube-node-cc28q\\\\nI0125 00:07:16.475031 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-apiserver/kube-apiserver-crc after 0 failed attempt(s)\\\\nI0125 00:07:16.475146 6651 default_network_controller.go:776] Recording success event on pod openshift-kube-apiserver/kube-apiserver-crc\\\\nI0125 00:07:16.475018 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j after 0 failed attempt(s)\\\\nI0125 00:07:16.475161 6651 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j\\\\nI0125 00:07:16.475146 6651 ovn.go:134] Ensuring zone local for Pod openshift-ovn-kubernetes/ovnkube-node-cc28q in node crc\\\\nI0125 00:07:16.475179 6651 obj_retry.go:386] Retry successful for *v1.Pod openshift-ovn-kubernetes/ovnkube-node-cc28q after 0 failed attempt(s)\\\\nI0125 00:07:16.475188 6651 default_network_controller.go:776] Recording success event on pod openshift-ovn-kubernetes/ovnkube-node-cc28q\\\\nI0125 00:07:16.474974 6651 \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:07:15Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:45Z\\\",\\\"message\\\":\\\" (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0125 00:07:45.643698 7045 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0125 00:07:45.644281 7045 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0125 00:07:45.644502 7045 egressservice_zone_endpointslice.go:80] Ignoring updating openshift-machine-api/cluster-autoscaler-operator for endpointslice openshift-machine-api/cluster-autoscaler-operator-hm4j7 as it is not a known egress service\\\\nI0125 00:07:45.644507 7045 master_controller.go:87] Starting Admin Policy Based Route Controller\\\\nI0125 00:07:45.644526 7045 external_controller.go:276] Starting Admin Policy Based Route Controller\\\\nI0125 00:07:45.644546 7045 egressservice_zone_node.go:110] Processing sync for Egress Service node crc\\\\nI0125 00:07:45.644557 7045 ovnkube.go:599] Stopped ovnkube\\\\nI0125 00:07:45.644565 7045 egressservice_zone_node.go:113] Finished syncing 
Egress Service node crc: 21.471µs\\\\nI0125 00:07:45.644586 7045 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0125 00:07:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:07:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o:
//7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.312694 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.331851 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.345036 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.345144 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.345171 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.345201 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.345225 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:00Z","lastTransitionTime":"2026-01-25T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.350482 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"effda373-9234-4f45-83f4-a07522fa05a7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebc8ef5ed458ecf36d2dbca7f31efb3cdd30a8c6f652f2bc4adfe01908061cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66146e1867d01422601c070ea10822d47529df9fba22de06fec14fc0a8124455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://576bfc5316d868ae8a28a9a77121be53f290b6d04dbf72bc22435f12ecd99ae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef1d5d8207817b57bc382cfb3b403f325cc1e158d53b4f0980130e3c04613f25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef1d5d8207817b57bc382cfb3b403f325cc1e158d53b4f0980130e3c04613f25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.364746 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.385515 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.408664 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"98b4017b-02b3-4c2e-9d61-ebc765d69aa2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d16406949c0e3b968f27eb5903d09f4f6f00a048431e549dad52ed6ca874db3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd19b3d06299db37381da7ccb0a0c18b7218ac4b11086a71ff99f0b66fe8bd32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0f5e5dd040b55d2048fcf8872f95b7f59b9d98b1f57b8db9279566a0fd7c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c0bd735ebf4924b28a8f190b638051fbfa6d711826cedaf478a0bb0350daed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b575ed7dde44ce49b3579b9d416b60d59d03bc782f4b098759ee2ca4ad97a347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10d1c5e6549442b3160096ae3ad896fa230f6681874846cac6cd4530aa391170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\
\\":{\\\"containerID\\\":\\\"cri-o://10d1c5e6549442b3160096ae3ad896fa230f6681874846cac6cd4530aa391170\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f247468f96053efabec80a3943301b824371b4db529e638153ea78de3a55c9b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f247468f96053efabec80a3943301b824371b4db529e638153ea78de3a55c9b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1b67e2fa943c3db959df46bfa879d91fc3fdcb46d14dc3f8ddebdda5d5f3b0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b67e2fa943c3db959df46bfa879d91fc3fdcb46d14dc3f8ddebdda5d5f3b0ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.422530 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fa8dd6d7ac6ffa3214a1a6a4539d230445f1ffbf1989f0fb77be31eacde7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769742294a1ffd4551af4b1045e310ed6ab47e9254c01b9b251d6cafe0b48a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l284j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 25 
00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.440444 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2026-01-25T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.449038 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.449638 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.449823 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.450165 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.450481 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:00Z","lastTransitionTime":"2026-01-25T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.458663 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.479701 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.499668 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.517910 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.532963 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqtvp" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"39723ce0-614f-4ada-9cc7-6efe79c7e51c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqtvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.550590 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.553816 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.553891 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.553917 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.553946 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.553970 4985 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:00Z","lastTransitionTime":"2026-01-25T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.567812 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.586460 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.602577 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37f704152429d54471e85318a5e83ab5aa842441c44fc4e6615bb50d8fe2b03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:36Z\\\",\\\"message\\\":\\\"2026-01-25T00:06:51+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_6db04f53-5cb0-4660-8c3e-d70eb8d04115\\\\n2026-01-25T00:06:51+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6db04f53-5cb0-4660-8c3e-d70eb8d04115 to /host/opt/cni/bin/\\\\n2026-01-25T00:06:51Z [verbose] multus-daemon started\\\\n2026-01-25T00:06:51Z [verbose] Readiness Indicator file check\\\\n2026-01-25T00:07:36Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:00Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.656749 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.656806 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.656829 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.656857 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.656878 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:00Z","lastTransitionTime":"2026-01-25T00:08:00Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.729724 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 08:58:23.071274469 +0000 UTC Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.759604 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.759626 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.759633 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.759645 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.759653 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:00Z","lastTransitionTime":"2026-01-25T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.862344 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.862540 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.862624 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.862686 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.862751 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:00Z","lastTransitionTime":"2026-01-25T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.964850 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.964920 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.964947 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.964979 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:00 crc kubenswrapper[4985]: I0125 00:08:00.965005 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:00Z","lastTransitionTime":"2026-01-25T00:08:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.068858 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.068958 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.068991 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.069026 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.069045 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:01Z","lastTransitionTime":"2026-01-25T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.171565 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.171934 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.172182 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.172410 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.172595 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:01Z","lastTransitionTime":"2026-01-25T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.273875 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.273897 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.274175 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:08:01 crc kubenswrapper[4985]: E0125 00:08:01.274205 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:08:01 crc kubenswrapper[4985]: E0125 00:08:01.274359 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:08:01 crc kubenswrapper[4985]: E0125 00:08:01.274507 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.275745 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.275810 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.275834 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.275859 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.275881 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:01Z","lastTransitionTime":"2026-01-25T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.379169 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.379238 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.379261 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.379289 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.379311 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:01Z","lastTransitionTime":"2026-01-25T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.482060 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.482086 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.482094 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.482121 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.482133 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:01Z","lastTransitionTime":"2026-01-25T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.585505 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.585619 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.585645 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.585673 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.585694 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:01Z","lastTransitionTime":"2026-01-25T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.688577 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.688617 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.688632 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.688654 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.688670 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:01Z","lastTransitionTime":"2026-01-25T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.730925 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 10:47:27.739720147 +0000 UTC Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.791319 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.791351 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.791362 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.791375 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.791387 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:01Z","lastTransitionTime":"2026-01-25T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.894069 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.894161 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.894180 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.894202 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.894225 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:01Z","lastTransitionTime":"2026-01-25T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.997018 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.997062 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.997076 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.997094 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:01 crc kubenswrapper[4985]: I0125 00:08:01.997146 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:01Z","lastTransitionTime":"2026-01-25T00:08:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.100725 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.100809 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.100832 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.100872 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.100894 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:02Z","lastTransitionTime":"2026-01-25T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.203298 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.203368 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.203392 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.203422 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.203445 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:02Z","lastTransitionTime":"2026-01-25T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.321639 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.322355 4985 scope.go:117] "RemoveContainer" containerID="cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c" Jan 25 00:08:02 crc kubenswrapper[4985]: E0125 00:08:02.322549 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-cc28q_openshift-ovn-kubernetes(64cc3123-ba76-4365-86ae-c4cf7c09a805)\"" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" Jan 25 00:08:02 crc kubenswrapper[4985]: E0125 00:08:02.322541 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.323359 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.323404 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.323420 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.323441 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.323456 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:02Z","lastTransitionTime":"2026-01-25T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.337771 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fa83abe-5c61-40a5-bf77-d8f929bdda78\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a639c7a2326c6fa68853bbeb5bd3f1e7a65e8097d9e009cbde0b446353601a8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585\\\",\\\"image\\\":\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-dvx5b\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-dddxc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.352917 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e4670a6b-7472-446f-82ed-65cf422de2e5\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7f5f3afd65d384d3a15aa4feac4649bea5fbf3b5aa3f314eb19a62a483119bbc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ed4fa079c270fd176aaf6bb587eaa6e5c1cec4af40b215d621ebc50343f62008\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ed4fa079c270fd176aaf6bb587eaa6e5c1cec4af40b215d621ebc50343f62008\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.366024 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.389826 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://de4748bf9298d89504576ed336b102ae88bd10da16b48bbf4f341750ac2b7339\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.403310 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e9a5fb92a1e827013fea8dfad7df6e4170d51c4f598e57f31736e77b1872e17d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4d2f40ce2457c8d98e0190122cada14940fda2ca554877b78c83b85d48ddeae8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.420495 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c4a302c2-5f69-46d5-b4da-7e4306ea3a3d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f20b33f2bceca3afb3def9d0b08a71dabb78505d163b564e594555bbed71758e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99e12d3ec505b7c6767fad3c8f55e8fa7232b3913d54afe90a1a298d26e95e11\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b0a9888b6eac9dc4b1013a161f5f9090a42fe85e56765bbd001b3062f1e0ed3c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://92368931a7d1ce722edc5f34407358dfe2a6a27c16a1a7517ff2213f2cd4bcac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ae3ae7018b82f800d46a69408e1b9ef00dd696d2b67435222c4be1dc2660b605\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://42f460d8d4037e896cbaff064f2cd4306b5c084af0c509a1365a29f5b5b86cd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eb6a94b07604f3992d61f025cd638d02c44a3dae3e7563790a322ec5f24ffcfd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:57Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n8zxg\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-dt2mv\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.425790 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.425852 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:02 crc 
kubenswrapper[4985]: I0125 00:08:02.425882 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.425911 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.425933 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:02Z","lastTransitionTime":"2026-01-25T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.448893 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-cqtvp" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"39723ce0-614f-4ada-9cc7-6efe79c7e51c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xc2dv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:04Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-cqtvp\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.465947 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"4d2e3844-5209-406b-8b7d-90c980e6830d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769299602\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769299602\\\\\\\\\\\\\\\" (2026-01-24 23:06:42 +0000 UTC to 2027-01-24 23:06:42 +0000 UTC (now=2026-01-25 00:06:48.988155216 +0000 UTC))\\\\\\\"\\\\nI0125 00:06:48.988188 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0125 00:06:48.988207 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0125 00:06:48.988789 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988861 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0125 00:06:48.988916 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-4131166594/tls.crt::/tmp/serving-cert-4131166594/tls.key\\\\\\\"\\\\nI0125 00:06:48.989022 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0125 00:06:48.990400 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990422 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0125 00:06:48.990456 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0125 00:06:48.990466 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI0125 00:06:48.990580 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0125 00:06:48.990598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF0125 00:06:48.993656 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.479508 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://630cb0f6397add6d640cbe212eefd6e46f22c255b9c630df0e697a83040bcc7f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.491359 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-xzbbh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d1879c0f-3576-4f5a-9ac2-ada68270b8da\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://abfc1de8b7f8345d8956746baaf4a5fff3a58319808c9ec934af08b9fab719bb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-27ckk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-xzbbh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.511695 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-4w9l7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0294dfed-64df-4d3c-92de-7a93787780a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://37f704152429d54471e85318a5e83ab5aa842441c44fc4e6615bb50d8fe2b03e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:36Z\\\",\\\"message\\\":\\\"2026-01-25T00:06:51+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_6db04f53-5cb0-4660-8c3e-d70eb8d04115\\\\n2026-01-25T00:06:51+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_6db04f53-5cb0-4660-8c3e-d70eb8d04115 to /host/opt/cni/bin/\\\\n2026-01-25T00:06:51Z [verbose] multus-daemon started\\\\n2026-01-25T00:06:51Z [verbose] Readiness Indicator file check\\\\n2026-01-25T00:07:36Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t5zfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-multus\"/\"multus-4w9l7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.528704 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.528761 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.528778 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.528799 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.528818 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:02Z","lastTransitionTime":"2026-01-25T00:08:02Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.537622 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"64cc3123-ba76-4365-86ae-c4cf7c09a805\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:52Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-25T00:07:45Z\\\",\\\"message\\\":\\\" (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0125 00:07:45.643698 7045 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0125 00:07:45.644281 7045 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0125 00:07:45.644502 7045 egressservice_zone_endpointslice.go:80] Ignoring updating openshift-machine-api/cluster-autoscaler-operator for endpointslice openshift-machine-api/cluster-autoscaler-operator-hm4j7 as it is not a known egress service\\\\nI0125 00:07:45.644507 7045 master_controller.go:87] Starting Admin Policy Based Route Controller\\\\nI0125 00:07:45.644526 7045 external_controller.go:276] Starting Admin Policy Based Route Controller\\\\nI0125 00:07:45.644546 7045 egressservice_zone_node.go:110] Processing sync for Egress Service node crc\\\\nI0125 00:07:45.644557 7045 ovnkube.go:599] Stopped ovnkube\\\\nI0125 00:07:45.644565 7045 egressservice_zone_node.go:113] Finished syncing Egress Service node crc: 21.471µs\\\\nI0125 00:07:45.644586 7045 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0125 
00:07:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-25T00:07:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-cc28q_openshift-ovn-kubernetes(64cc3123-ba76-4365-86ae-c4cf7c09a805)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:50Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqbqw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-cc28q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.553688 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-fcpqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e6a1e426-cc25-4015-ab79-402c7eecfafa\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:53Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aea67a9b0fb290b39dd7e1528354460a6fe069caac4afcfc0a46d6a1eae28e18\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pb7b5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:52Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-fcpqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.577869 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28571e28-42b9-4fb5-b9f3-14de280682b1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a3161e080c67c6cebd2744198265a598501e6c8a42de66d0b5009c23f15a7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://da7e7cba2632ff53c60344d1c9645b91cf9bb31330e2e202e48c4e7d280519f5\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1317fdfcb5a9f417c6902de2f380e66992ee1042ac64aaa7733b00e43d81b294\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.593228 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"effda373-9234-4f45-83f4-a07522fa05a7\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ebc8ef5ed458ecf36d2dbca7f31efb3cdd30a8c6f652f2bc4adfe01908061cf9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://66146e1867d01422601c070ea10822d47529df9fba22de06fec14fc0a8124455\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://576bfc5316d868ae8a28a9a77121be53f290b6d04dbf72bc22435f12ecd99ae5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef1d5d8207817b57bc382cfb3b403f325cc1e158d53b4f0980130e3c04613f25\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef1d5d8207817b57bc382cfb3b403f325cc1e158d53b4f0980130e3c04613f25\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.615172 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready 
status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.631820 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.631870 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.631882 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.631901 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.631919 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:02Z","lastTransitionTime":"2026-01-25T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.637972 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:49Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.671133 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"98b4017b-02b3-4c2e-9d61-ebc765d69aa2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:33Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:06:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3d16406949c0e3b968f27eb5903d09f4f6f00a048431e549dad52ed6ca874db3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd19b3d06299db37381da7ccb0a0c18b7218ac4b11086a71ff99f0b66fe8bd32\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8d0f5e5dd040b55d2048fcf8872f95b7f59b9d98b1f57b8db9279566a0fd7c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52c0bd735ebf4924b28a8f190b638051fbfa6d7
11826cedaf478a0bb0350daed\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b575ed7dde44ce49b3579b9d416b60d59d03bc782f4b098759ee2ca4ad97a347\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:06:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://10d1c5e6549442b3160096ae3ad896fa230f6681874846cac6cd4530aa391170\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://10d1c5e6549442b3160096ae3ad896fa230f6681874846cac6cd4530aa391170\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f247468f96053efabec80a3943301b824371b4db529e638153ea78de3a55c9b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f247468f96053efabec80a3943301b824371b4db529e638153ea78de3a55c9b6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:31Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:31Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://1b67e2fa943c3db959df46bfa879d91fc3fdcb46d14dc3f8ddebdda5d5f3b0ff\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1b67e2fa943c3db959df46bfa879d91fc3fdcb46d14dc3f8ddebdda5d5f3b0ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-25T00:06:32Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-25T00:06:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:06:30Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.688976 4985 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a3ed24b0-a81a-4bc5-9218-446a83a8f78d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-25T00:07:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fa8dd6d7ac6ffa3214a1a6a4539d230445f1ffbf1989f0fb77be31eacde7d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\
\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://769742294a1ffd4551af4b1045e310ed6ab47e9254c01b9b251d6cafe0b48a64\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-25T00:07:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vhfbb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-25T00:07:02Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-l284j\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:02Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.731959 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 10:21:01.864096812 +0000 UTC Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.733831 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.733875 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.733896 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.733924 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.733940 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:02Z","lastTransitionTime":"2026-01-25T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.836763 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.836810 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.836826 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.836848 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.836863 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:02Z","lastTransitionTime":"2026-01-25T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.938796 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.938841 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.938859 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.938880 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:02 crc kubenswrapper[4985]: I0125 00:08:02.938897 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:02Z","lastTransitionTime":"2026-01-25T00:08:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.041816 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.041858 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.041868 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.041880 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.041889 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:03Z","lastTransitionTime":"2026-01-25T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.144770 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.144881 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.144920 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.144953 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.144974 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:03Z","lastTransitionTime":"2026-01-25T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.248644 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.248698 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.248709 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.248726 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.248752 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:03Z","lastTransitionTime":"2026-01-25T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.274462 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.274475 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:08:03 crc kubenswrapper[4985]: E0125 00:08:03.274824 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:08:03 crc kubenswrapper[4985]: E0125 00:08:03.274915 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.274495 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:08:03 crc kubenswrapper[4985]: E0125 00:08:03.275174 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.350908 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.350946 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.350957 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.350974 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.350986 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:03Z","lastTransitionTime":"2026-01-25T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.453911 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.453985 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.454004 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.454030 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.454049 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:03Z","lastTransitionTime":"2026-01-25T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.557628 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.557703 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.557725 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.557748 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.557761 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:03Z","lastTransitionTime":"2026-01-25T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.660716 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.660803 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.660834 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.660868 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.660890 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:03Z","lastTransitionTime":"2026-01-25T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.732467 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 03:45:03.607341221 +0000 UTC Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.763791 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.763847 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.763866 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.763889 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.763908 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:03Z","lastTransitionTime":"2026-01-25T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.866139 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.866178 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.866190 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.866214 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.866238 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:03Z","lastTransitionTime":"2026-01-25T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.968870 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.968921 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.968934 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.968953 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:03 crc kubenswrapper[4985]: I0125 00:08:03.968967 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:03Z","lastTransitionTime":"2026-01-25T00:08:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.072029 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.072143 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.072173 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.072202 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.072224 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:04Z","lastTransitionTime":"2026-01-25T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.174682 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.174743 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.174760 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.174779 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.174792 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:04Z","lastTransitionTime":"2026-01-25T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.274386 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:08:04 crc kubenswrapper[4985]: E0125 00:08:04.274963 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.277501 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.277547 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.277570 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.277599 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.277623 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:04Z","lastTransitionTime":"2026-01-25T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.380820 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.380888 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.380904 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.380931 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.380949 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:04Z","lastTransitionTime":"2026-01-25T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.483879 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.483969 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.484004 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.484036 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.484060 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:04Z","lastTransitionTime":"2026-01-25T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.587975 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.588039 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.588063 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.588097 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.588150 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:04Z","lastTransitionTime":"2026-01-25T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.691099 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.691218 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.691239 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.691263 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.691284 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:04Z","lastTransitionTime":"2026-01-25T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.733349 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 18:46:50.992223859 +0000 UTC Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.799845 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.799903 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.799920 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.799944 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.799965 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:04Z","lastTransitionTime":"2026-01-25T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.902487 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.902566 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.902585 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.902609 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:04 crc kubenswrapper[4985]: I0125 00:08:04.902628 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:04Z","lastTransitionTime":"2026-01-25T00:08:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.005316 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.005369 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.005388 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.005413 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.005431 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:05Z","lastTransitionTime":"2026-01-25T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.109046 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.109167 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.109193 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.109217 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.109239 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:05Z","lastTransitionTime":"2026-01-25T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.130985 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.131050 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.131162 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.131200 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.131223 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:05Z","lastTransitionTime":"2026-01-25T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:05 crc kubenswrapper[4985]: E0125 00:08:05.153576 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.158937 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.159007 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.159029 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.159060 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.159084 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:05Z","lastTransitionTime":"2026-01-25T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:05 crc kubenswrapper[4985]: E0125 00:08:05.185858 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.191586 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.191641 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.191657 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.191679 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.191696 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:05Z","lastTransitionTime":"2026-01-25T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:05 crc kubenswrapper[4985]: E0125 00:08:05.213519 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.218202 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.218291 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.218365 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.218388 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.218405 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:05Z","lastTransitionTime":"2026-01-25T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:05 crc kubenswrapper[4985]: E0125 00:08:05.239916 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.246039 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.246097 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.246153 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.246183 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.246201 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:05Z","lastTransitionTime":"2026-01-25T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:05 crc kubenswrapper[4985]: E0125 00:08:05.267612 4985 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-25T00:08:05Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"c1c20c60-871f-4427-926e-8b5954451554\\\",\\\"systemUUID\\\":\\\"66207c91-b7c1-4e06-9d97-3e311fb7e34e\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-25T00:08:05Z is after 2025-08-24T17:21:41Z" Jan 25 00:08:05 crc kubenswrapper[4985]: E0125 00:08:05.267878 4985 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.270363 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.270446 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.270473 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.270498 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.270516 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:05Z","lastTransitionTime":"2026-01-25T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.273661 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.273705 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.273739 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:08:05 crc kubenswrapper[4985]: E0125 00:08:05.273820 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:08:05 crc kubenswrapper[4985]: E0125 00:08:05.273931 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:08:05 crc kubenswrapper[4985]: E0125 00:08:05.274090 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.373696 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.373730 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.373747 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.373767 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.373785 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:05Z","lastTransitionTime":"2026-01-25T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.477617 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.477693 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.477716 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.477745 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.477768 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:05Z","lastTransitionTime":"2026-01-25T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.581062 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.581145 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.581163 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.581189 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.581205 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:05Z","lastTransitionTime":"2026-01-25T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.683649 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.683693 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.683717 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.683733 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.683745 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:05Z","lastTransitionTime":"2026-01-25T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.734087 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 17:36:45.64973964 +0000 UTC Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.787075 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.787159 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.787177 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.787201 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.787219 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:05Z","lastTransitionTime":"2026-01-25T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.890209 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.890311 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.890327 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.890345 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.890357 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:05Z","lastTransitionTime":"2026-01-25T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.993589 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.993660 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.993678 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.993703 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:05 crc kubenswrapper[4985]: I0125 00:08:05.993721 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:05Z","lastTransitionTime":"2026-01-25T00:08:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.096454 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.096524 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.096539 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.096557 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.096569 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:06Z","lastTransitionTime":"2026-01-25T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.199268 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.199324 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.199345 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.199369 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.199387 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:06Z","lastTransitionTime":"2026-01-25T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.274500 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:08:06 crc kubenswrapper[4985]: E0125 00:08:06.274746 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.302348 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.302410 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.302428 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.302451 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.302470 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:06Z","lastTransitionTime":"2026-01-25T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.405863 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.405925 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.405942 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.405964 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.405981 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:06Z","lastTransitionTime":"2026-01-25T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.508716 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.508789 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.508807 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.508831 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.508848 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:06Z","lastTransitionTime":"2026-01-25T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.612184 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.612246 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.612263 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.612287 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.612305 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:06Z","lastTransitionTime":"2026-01-25T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.715987 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.716150 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.716181 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.716215 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.716239 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:06Z","lastTransitionTime":"2026-01-25T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.735066 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 18:58:59.561441008 +0000 UTC Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.819225 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.819306 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.819329 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.819359 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.819379 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:06Z","lastTransitionTime":"2026-01-25T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.922894 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.922991 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.923052 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.923080 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:06 crc kubenswrapper[4985]: I0125 00:08:06.923100 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:06Z","lastTransitionTime":"2026-01-25T00:08:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.026547 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.026602 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.026619 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.026640 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.026658 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:07Z","lastTransitionTime":"2026-01-25T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.129978 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.130047 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.130067 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.130092 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.130146 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:07Z","lastTransitionTime":"2026-01-25T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.233326 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.233431 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.233483 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.233507 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.233526 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:07Z","lastTransitionTime":"2026-01-25T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.274275 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.274334 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.274305 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:08:07 crc kubenswrapper[4985]: E0125 00:08:07.274485 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:08:07 crc kubenswrapper[4985]: E0125 00:08:07.274627 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:08:07 crc kubenswrapper[4985]: E0125 00:08:07.274772 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.336584 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.336665 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.336691 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.336720 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.336743 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:07Z","lastTransitionTime":"2026-01-25T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.440342 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.440419 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.440446 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.440478 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.440496 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:07Z","lastTransitionTime":"2026-01-25T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.543424 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.543497 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.543515 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.543540 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.543559 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:07Z","lastTransitionTime":"2026-01-25T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.647259 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.647322 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.647339 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.647362 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.647379 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:07Z","lastTransitionTime":"2026-01-25T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.735637 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 13:21:19.619113841 +0000 UTC Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.749723 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.749787 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.749806 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.749830 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.749848 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:07Z","lastTransitionTime":"2026-01-25T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.852892 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.852957 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.852974 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.852997 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.853015 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:07Z","lastTransitionTime":"2026-01-25T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.955439 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.955497 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.955514 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.955536 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:07 crc kubenswrapper[4985]: I0125 00:08:07.955553 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:07Z","lastTransitionTime":"2026-01-25T00:08:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.058619 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.058682 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.058699 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.058722 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.058739 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:08Z","lastTransitionTime":"2026-01-25T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.161876 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.161958 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.161979 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.162003 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.162021 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:08Z","lastTransitionTime":"2026-01-25T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.186795 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs\") pod \"network-metrics-daemon-cqtvp\" (UID: \"39723ce0-614f-4ada-9cc7-6efe79c7e51c\") " pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:08:08 crc kubenswrapper[4985]: E0125 00:08:08.186963 4985 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 25 00:08:08 crc kubenswrapper[4985]: E0125 00:08:08.187046 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs podName:39723ce0-614f-4ada-9cc7-6efe79c7e51c nodeName:}" failed. No retries permitted until 2026-01-25 00:09:12.18702351 +0000 UTC m=+162.218959823 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs") pod "network-metrics-daemon-cqtvp" (UID: "39723ce0-614f-4ada-9cc7-6efe79c7e51c") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.264969 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.265223 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.265248 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.265273 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.265295 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:08Z","lastTransitionTime":"2026-01-25T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.274751 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:08:08 crc kubenswrapper[4985]: E0125 00:08:08.274930 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
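The MountVolume failure just above is not retried immediately: the nestedpendingoperations entry blocks the operation until 00:09:12 with durationBeforeRetry 1m4s, i.e. the delay grows with each failed attempt. A minimal Go sketch of that kind of doubling backoff, assuming a 500ms initial delay and a 2x growth factor (the exact constants the kubelet uses are an assumption here, but 1m4s is exactly 0.5s doubled seven times):

```go
package main

import (
	"fmt"
	"time"
)

// Illustrative doubling backoff for a repeatedly failing volume mount.
// The 500ms initial delay and 2x factor are assumptions, chosen because they
// reproduce the 1m4s durationBeforeRetry (0.5s * 2^7) seen in the log above.
func backoffAfter(failures int, initial, max time.Duration) time.Duration {
	d := initial
	for i := 1; i < failures; i++ {
		d *= 2
		if d > max {
			return max
		}
	}
	return d
}

func main() {
	for n := 1; n <= 8; n++ {
		fmt.Printf("failure %d -> retry after %v\n", n, backoffAfter(n, 500*time.Millisecond, 2*time.Minute))
	}
	// failure 8 -> retry after 1m4s, matching the MountVolume retry window above.
}
```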
pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.368004 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.368075 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.368091 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.368143 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.368161 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:08Z","lastTransitionTime":"2026-01-25T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.471272 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.471338 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.471358 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.471383 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.471400 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:08Z","lastTransitionTime":"2026-01-25T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.574744 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.574814 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.574838 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.574862 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.574879 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:08Z","lastTransitionTime":"2026-01-25T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.677750 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.677826 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.677850 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.677879 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.677905 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:08Z","lastTransitionTime":"2026-01-25T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.735851 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 12:11:40.784340934 +0000 UTC Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.780921 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.781029 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.781046 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.781073 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.781089 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:08Z","lastTransitionTime":"2026-01-25T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.883015 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.883056 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.883068 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.883083 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.883095 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:08Z","lastTransitionTime":"2026-01-25T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.986426 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.986495 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.986521 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.986553 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:08 crc kubenswrapper[4985]: I0125 00:08:08.986579 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:08Z","lastTransitionTime":"2026-01-25T00:08:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.090009 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.090085 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.090144 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.090168 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.090189 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:09Z","lastTransitionTime":"2026-01-25T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.193020 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.193082 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.193099 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.193159 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.193177 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:09Z","lastTransitionTime":"2026-01-25T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.273680 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.273802 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.273680 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:08:09 crc kubenswrapper[4985]: E0125 00:08:09.273882 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:08:09 crc kubenswrapper[4985]: E0125 00:08:09.274032 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:08:09 crc kubenswrapper[4985]: E0125 00:08:09.274215 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.302620 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.302695 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.302718 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.302747 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.302769 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:09Z","lastTransitionTime":"2026-01-25T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.405541 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.405607 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.405663 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.405689 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.405707 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:09Z","lastTransitionTime":"2026-01-25T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.509203 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.509253 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.509264 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.509282 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.509295 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:09Z","lastTransitionTime":"2026-01-25T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.612451 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.612539 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.612564 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.612621 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.612642 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:09Z","lastTransitionTime":"2026-01-25T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.715133 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.715193 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.715215 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.715243 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.715263 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:09Z","lastTransitionTime":"2026-01-25T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.736419 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 05:46:05.815373154 +0000 UTC Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.818306 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.818362 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.818380 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.818401 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.818418 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:09Z","lastTransitionTime":"2026-01-25T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.921193 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.921362 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.921390 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.921468 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:09 crc kubenswrapper[4985]: I0125 00:08:09.921673 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:09Z","lastTransitionTime":"2026-01-25T00:08:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.024722 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.024789 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.024808 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.024830 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.024848 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:10Z","lastTransitionTime":"2026-01-25T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.128024 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.128178 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.128236 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.128266 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.128288 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:10Z","lastTransitionTime":"2026-01-25T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.231403 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.231462 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.231481 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.231507 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.231523 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:10Z","lastTransitionTime":"2026-01-25T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.273991 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:08:10 crc kubenswrapper[4985]: E0125 00:08:10.274240 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.328482 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-4w9l7" podStartSLOduration=81.328455241 podStartE2EDuration="1m21.328455241s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:10.327827444 +0000 UTC m=+100.359763797" watchObservedRunningTime="2026-01-25 00:08:10.328455241 +0000 UTC m=+100.360391554" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.328779 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-xzbbh" podStartSLOduration=82.328765879 podStartE2EDuration="1m22.328765879s" podCreationTimestamp="2026-01-25 00:06:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:10.304188011 +0000 UTC m=+100.336124334" watchObservedRunningTime="2026-01-25 00:08:10.328765879 +0000 UTC m=+100.360702192" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.334871 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.334944 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.334967 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:10 crc 
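Every KubeletNotReady condition and "Error syncing pod" entry in this stretch bottoms out in the same cause: the container runtime reports NetworkReady=false because it finds no CNI configuration file in /etc/kubernetes/cni/net.d/. A minimal sketch of that kind of readiness check, assuming the conventional .conf/.conflist/.json extensions that libcni loads (the directory path comes from the log; the helper itself is illustrative, not the kubelet's own code):

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cniConfigured reports whether confDir contains at least one CNI network
// configuration file. The extensions mirror what libcni conventionally loads;
// an empty directory corresponds to the "network plugin not ready" state the
// log above keeps reporting.
func cniConfigured(confDir string) (bool, error) {
	entries, err := os.ReadDir(confDir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := cniConfigured("/etc/kubernetes/cni/net.d")
	if err != nil || !ok {
		fmt.Println("container runtime network not ready: no CNI configuration file found")
		return
	}
	fmt.Println("CNI configuration present")
}
```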
kubenswrapper[4985]: I0125 00:08:10.334997 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.335022 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:10Z","lastTransitionTime":"2026-01-25T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.363169 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-dt2mv" podStartSLOduration=81.363155105 podStartE2EDuration="1m21.363155105s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:10.362813796 +0000 UTC m=+100.394750089" watchObservedRunningTime="2026-01-25 00:08:10.363155105 +0000 UTC m=+100.395091388" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.413980 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=81.413967144 podStartE2EDuration="1m21.413967144s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:10.413186403 +0000 UTC m=+100.445122776" watchObservedRunningTime="2026-01-25 00:08:10.413967144 +0000 UTC m=+100.445903427" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.437889 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.438192 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.438373 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.438540 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.438680 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:10Z","lastTransitionTime":"2026-01-25T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.510240 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-fcpqg" podStartSLOduration=82.510212549 podStartE2EDuration="1m22.510212549s" podCreationTimestamp="2026-01-25 00:06:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:10.509341736 +0000 UTC m=+100.541278039" watchObservedRunningTime="2026-01-25 00:08:10.510212549 +0000 UTC m=+100.542148862" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.546561 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.546798 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.546869 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.546959 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.547018 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:10Z","lastTransitionTime":"2026-01-25T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.547734 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=47.547707407 podStartE2EDuration="47.547707407s" podCreationTimestamp="2026-01-25 00:07:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:10.545888209 +0000 UTC m=+100.577824532" watchObservedRunningTime="2026-01-25 00:08:10.547707407 +0000 UTC m=+100.579643720" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.548346 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=80.548335514 podStartE2EDuration="1m20.548335514s" podCreationTimestamp="2026-01-25 00:06:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:10.528240855 +0000 UTC m=+100.560177138" watchObservedRunningTime="2026-01-25 00:08:10.548335514 +0000 UTC m=+100.580271817" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.595422 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=21.595402514 podStartE2EDuration="21.595402514s" podCreationTimestamp="2026-01-25 00:07:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:10.580346007 +0000 UTC m=+100.612282300" watchObservedRunningTime="2026-01-25 00:08:10.595402514 +0000 UTC m=+100.627338797" Jan 25 00:08:10 crc 
kubenswrapper[4985]: I0125 00:08:10.623750 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-l284j" podStartSLOduration=81.62372433 podStartE2EDuration="1m21.62372433s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:10.595749703 +0000 UTC m=+100.627686006" watchObservedRunningTime="2026-01-25 00:08:10.62372433 +0000 UTC m=+100.655660633" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.649473 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.649539 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.649558 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.649582 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.649598 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:10Z","lastTransitionTime":"2026-01-25T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.665428 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=10.665404088 podStartE2EDuration="10.665404088s" podCreationTimestamp="2026-01-25 00:08:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:10.66470471 +0000 UTC m=+100.696641033" watchObservedRunningTime="2026-01-25 00:08:10.665404088 +0000 UTC m=+100.697340401" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.665996 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podStartSLOduration=81.665986674 podStartE2EDuration="1m21.665986674s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:10.654732617 +0000 UTC m=+100.686668930" watchObservedRunningTime="2026-01-25 00:08:10.665986674 +0000 UTC m=+100.697922987" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.737196 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 01:40:05.564812411 +0000 UTC Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.752182 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.752248 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:10 crc 
kubenswrapper[4985]: I0125 00:08:10.752269 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.752298 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.752322 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:10Z","lastTransitionTime":"2026-01-25T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.854944 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.854993 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.855009 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.855030 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.855047 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:10Z","lastTransitionTime":"2026-01-25T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.957552 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.957632 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.957657 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.957686 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:10 crc kubenswrapper[4985]: I0125 00:08:10.957708 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:10Z","lastTransitionTime":"2026-01-25T00:08:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.060921 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.060987 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.061006 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.061030 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.061050 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:11Z","lastTransitionTime":"2026-01-25T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.164591 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.164650 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.164666 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.164688 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.164706 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:11Z","lastTransitionTime":"2026-01-25T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.267936 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.268014 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.268048 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.268079 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.268102 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:11Z","lastTransitionTime":"2026-01-25T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.274225 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.274265 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.274508 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:08:11 crc kubenswrapper[4985]: E0125 00:08:11.274497 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:08:11 crc kubenswrapper[4985]: E0125 00:08:11.274658 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:08:11 crc kubenswrapper[4985]: E0125 00:08:11.274790 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.372529 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.372588 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.372615 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.372645 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.372667 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:11Z","lastTransitionTime":"2026-01-25T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.476030 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.476649 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.476830 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.476970 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.477182 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:11Z","lastTransitionTime":"2026-01-25T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.579885 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.579953 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.579974 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.580614 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.580657 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:11Z","lastTransitionTime":"2026-01-25T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.684388 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.684471 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.684497 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.684530 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.684554 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:11Z","lastTransitionTime":"2026-01-25T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.737961 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 05:06:56.533563675 +0000 UTC Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.787071 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.787331 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.787402 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.787461 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.787521 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:11Z","lastTransitionTime":"2026-01-25T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.889546 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.889599 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.889616 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.889638 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.889656 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:11Z","lastTransitionTime":"2026-01-25T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.992870 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.992980 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.993000 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.993027 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:11 crc kubenswrapper[4985]: I0125 00:08:11.993053 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:11Z","lastTransitionTime":"2026-01-25T00:08:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.096304 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.096370 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.096410 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.096435 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.096450 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:12Z","lastTransitionTime":"2026-01-25T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.199587 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.199679 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.199698 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.199722 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.199739 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:12Z","lastTransitionTime":"2026-01-25T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.289017 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:08:12 crc kubenswrapper[4985]: E0125 00:08:12.289257 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.303076 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.303393 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.303537 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.303670 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.303817 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:12Z","lastTransitionTime":"2026-01-25T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.406862 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.406935 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.406955 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.406982 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.407001 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:12Z","lastTransitionTime":"2026-01-25T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.510218 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.510273 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.510281 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.510295 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.510304 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:12Z","lastTransitionTime":"2026-01-25T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.612959 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.613017 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.613036 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.613057 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.613073 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:12Z","lastTransitionTime":"2026-01-25T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.716139 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.716182 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.716191 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.716206 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.716216 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:12Z","lastTransitionTime":"2026-01-25T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.738677 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 22:24:49.825999242 +0000 UTC Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.817814 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.817850 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.817859 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.817877 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.817890 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:12Z","lastTransitionTime":"2026-01-25T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.920726 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.920801 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.920824 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.920852 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:12 crc kubenswrapper[4985]: I0125 00:08:12.920873 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:12Z","lastTransitionTime":"2026-01-25T00:08:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.023170 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.023242 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.023262 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.023291 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.023310 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:13Z","lastTransitionTime":"2026-01-25T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.125426 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.125499 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.125517 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.125537 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.125549 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:13Z","lastTransitionTime":"2026-01-25T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.227563 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.227616 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.227628 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.227666 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.227679 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:13Z","lastTransitionTime":"2026-01-25T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.274473 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.274549 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:08:13 crc kubenswrapper[4985]: E0125 00:08:13.274708 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.274787 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:08:13 crc kubenswrapper[4985]: E0125 00:08:13.274866 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:08:13 crc kubenswrapper[4985]: E0125 00:08:13.274924 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.330352 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.330413 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.330424 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.330439 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.330515 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:13Z","lastTransitionTime":"2026-01-25T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.433836 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.433866 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.433877 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.433891 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.433904 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:13Z","lastTransitionTime":"2026-01-25T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.535908 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.535969 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.535985 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.536009 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.536026 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:13Z","lastTransitionTime":"2026-01-25T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.638534 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.638838 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.638930 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.639016 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.639100 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:13Z","lastTransitionTime":"2026-01-25T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.739856 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 09:02:26.278997625 +0000 UTC Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.741490 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.741530 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.741546 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.741563 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.741574 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:13Z","lastTransitionTime":"2026-01-25T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.844771 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.844844 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.844860 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.844877 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.844886 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:13Z","lastTransitionTime":"2026-01-25T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.947884 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.947929 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.947942 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.947957 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:13 crc kubenswrapper[4985]: I0125 00:08:13.947966 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:13Z","lastTransitionTime":"2026-01-25T00:08:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.052582 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.052637 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.052655 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.052679 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.052698 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:14Z","lastTransitionTime":"2026-01-25T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.155364 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.155472 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.155490 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.155517 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.155536 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:14Z","lastTransitionTime":"2026-01-25T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.258192 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.258238 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.258250 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.258267 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.258280 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:14Z","lastTransitionTime":"2026-01-25T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.274615 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:08:14 crc kubenswrapper[4985]: E0125 00:08:14.274762 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.360495 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.360752 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.360820 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.360900 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.360984 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:14Z","lastTransitionTime":"2026-01-25T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.464324 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.464745 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.464901 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.465025 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.465176 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:14Z","lastTransitionTime":"2026-01-25T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.568615 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.568964 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.569148 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.569299 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.569432 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:14Z","lastTransitionTime":"2026-01-25T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.672441 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.672521 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.672543 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.672570 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.672591 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:14Z","lastTransitionTime":"2026-01-25T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.741022 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-20 06:20:01.594546588 +0000 UTC Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.775651 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.775714 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.775733 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.775756 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.775773 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:14Z","lastTransitionTime":"2026-01-25T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.878731 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.878804 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.878821 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.878845 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.878863 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:14Z","lastTransitionTime":"2026-01-25T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.981720 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.981805 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.981818 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.981838 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:14 crc kubenswrapper[4985]: I0125 00:08:14.981852 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:14Z","lastTransitionTime":"2026-01-25T00:08:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.085198 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.085237 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.085248 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.085265 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.085276 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:15Z","lastTransitionTime":"2026-01-25T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.189886 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.189947 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.189967 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.189997 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.190018 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:15Z","lastTransitionTime":"2026-01-25T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.274648 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.274704 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:08:15 crc kubenswrapper[4985]: E0125 00:08:15.274895 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.274985 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:08:15 crc kubenswrapper[4985]: E0125 00:08:15.275207 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:08:15 crc kubenswrapper[4985]: E0125 00:08:15.275351 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.293584 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.293646 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.293665 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.293691 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.293711 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:15Z","lastTransitionTime":"2026-01-25T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.328222 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.328294 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.328315 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.328345 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.328370 4985 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-25T00:08:15Z","lastTransitionTime":"2026-01-25T00:08:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.397348 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-mwbbt"] Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.397731 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mwbbt" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.399773 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.400466 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.400912 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.403496 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.473857 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-mwbbt\" (UID: \"ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mwbbt" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.473972 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-mwbbt\" (UID: \"ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mwbbt" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.474083 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-mwbbt\" (UID: \"ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mwbbt" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.474133 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2-service-ca\") pod \"cluster-version-operator-5c965bbfc6-mwbbt\" (UID: \"ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mwbbt" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.474170 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-mwbbt\" (UID: \"ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mwbbt" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.575068 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-mwbbt\" (UID: \"ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mwbbt" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.575215 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-mwbbt\" (UID: \"ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mwbbt" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.575255 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-mwbbt\" (UID: \"ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mwbbt" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.575313 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-mwbbt\" (UID: \"ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mwbbt" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.575392 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-mwbbt\" (UID: \"ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mwbbt" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.575369 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-mwbbt\" (UID: \"ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mwbbt" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.575715 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2-service-ca\") pod \"cluster-version-operator-5c965bbfc6-mwbbt\" (UID: \"ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mwbbt" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.577555 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2-service-ca\") pod \"cluster-version-operator-5c965bbfc6-mwbbt\" (UID: \"ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mwbbt" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.582442 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-mwbbt\" (UID: \"ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mwbbt" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.595240 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-mwbbt\" (UID: \"ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mwbbt" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.724322 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mwbbt" Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.741732 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-24 07:16:30.832832182 +0000 UTC Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.741784 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Jan 25 00:08:15 crc kubenswrapper[4985]: W0125 00:08:15.743447 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podebc88ab1_5763_4ba0_a5e1_f24fc66eb1f2.slice/crio-5d6557503a70cc5c874c0f1ff765eba5d961c403eb29d49af4464a47a3dbb65f WatchSource:0}: Error finding container 5d6557503a70cc5c874c0f1ff765eba5d961c403eb29d49af4464a47a3dbb65f: Status 404 returned error can't find the container with id 5d6557503a70cc5c874c0f1ff765eba5d961c403eb29d49af4464a47a3dbb65f Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.753216 4985 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 25 00:08:15 crc kubenswrapper[4985]: I0125 00:08:15.919201 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mwbbt" event={"ID":"ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2","Type":"ContainerStarted","Data":"5d6557503a70cc5c874c0f1ff765eba5d961c403eb29d49af4464a47a3dbb65f"} Jan 25 00:08:16 crc kubenswrapper[4985]: I0125 00:08:16.274319 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:08:16 crc kubenswrapper[4985]: E0125 00:08:16.274934 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:08:16 crc kubenswrapper[4985]: I0125 00:08:16.276094 4985 scope.go:117] "RemoveContainer" containerID="cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c" Jan 25 00:08:16 crc kubenswrapper[4985]: E0125 00:08:16.276528 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-cc28q_openshift-ovn-kubernetes(64cc3123-ba76-4365-86ae-c4cf7c09a805)\"" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" Jan 25 00:08:16 crc kubenswrapper[4985]: I0125 00:08:16.924299 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mwbbt" event={"ID":"ebc88ab1-5763-4ba0-a5e1-f24fc66eb1f2","Type":"ContainerStarted","Data":"db54c8da5b7ba9da7815886f47492ac9ce0204b6bb1d2d02a637a7c8af446704"} Jan 25 00:08:17 crc kubenswrapper[4985]: I0125 00:08:17.273586 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:08:17 crc kubenswrapper[4985]: I0125 00:08:17.273725 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:08:17 crc kubenswrapper[4985]: E0125 00:08:17.273901 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:08:17 crc kubenswrapper[4985]: I0125 00:08:17.273955 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:08:17 crc kubenswrapper[4985]: E0125 00:08:17.274083 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:08:17 crc kubenswrapper[4985]: E0125 00:08:17.274370 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:08:18 crc kubenswrapper[4985]: I0125 00:08:18.274024 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:08:18 crc kubenswrapper[4985]: E0125 00:08:18.274353 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:08:19 crc kubenswrapper[4985]: I0125 00:08:19.274637 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:08:19 crc kubenswrapper[4985]: I0125 00:08:19.274710 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:08:19 crc kubenswrapper[4985]: I0125 00:08:19.274638 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:08:19 crc kubenswrapper[4985]: E0125 00:08:19.274811 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:08:19 crc kubenswrapper[4985]: E0125 00:08:19.274980 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:08:19 crc kubenswrapper[4985]: E0125 00:08:19.275086 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:08:20 crc kubenswrapper[4985]: I0125 00:08:20.274434 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:08:20 crc kubenswrapper[4985]: E0125 00:08:20.275349 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:08:21 crc kubenswrapper[4985]: I0125 00:08:21.274197 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:08:21 crc kubenswrapper[4985]: I0125 00:08:21.274306 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:08:21 crc kubenswrapper[4985]: E0125 00:08:21.274616 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:08:21 crc kubenswrapper[4985]: E0125 00:08:21.274793 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:08:21 crc kubenswrapper[4985]: I0125 00:08:21.274358 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:08:21 crc kubenswrapper[4985]: E0125 00:08:21.274896 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:08:22 crc kubenswrapper[4985]: I0125 00:08:22.274100 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:08:22 crc kubenswrapper[4985]: E0125 00:08:22.274394 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:08:22 crc kubenswrapper[4985]: I0125 00:08:22.944989 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4w9l7_0294dfed-64df-4d3c-92de-7a93787780a2/kube-multus/1.log" Jan 25 00:08:22 crc kubenswrapper[4985]: I0125 00:08:22.945971 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4w9l7_0294dfed-64df-4d3c-92de-7a93787780a2/kube-multus/0.log" Jan 25 00:08:22 crc kubenswrapper[4985]: I0125 00:08:22.946084 4985 generic.go:334] "Generic (PLEG): container finished" podID="0294dfed-64df-4d3c-92de-7a93787780a2" containerID="37f704152429d54471e85318a5e83ab5aa842441c44fc4e6615bb50d8fe2b03e" exitCode=1 Jan 25 00:08:22 crc kubenswrapper[4985]: I0125 00:08:22.946286 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4w9l7" event={"ID":"0294dfed-64df-4d3c-92de-7a93787780a2","Type":"ContainerDied","Data":"37f704152429d54471e85318a5e83ab5aa842441c44fc4e6615bb50d8fe2b03e"} Jan 25 00:08:22 crc kubenswrapper[4985]: I0125 00:08:22.946395 4985 scope.go:117] "RemoveContainer" containerID="2b91e7db6e134fdd531255253320fe50f23c5d1ac2d384dad25a55fc35319473" Jan 25 00:08:22 crc kubenswrapper[4985]: I0125 00:08:22.946982 4985 scope.go:117] "RemoveContainer" containerID="37f704152429d54471e85318a5e83ab5aa842441c44fc4e6615bb50d8fe2b03e" Jan 25 00:08:22 crc kubenswrapper[4985]: E0125 00:08:22.947232 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-4w9l7_openshift-multus(0294dfed-64df-4d3c-92de-7a93787780a2)\"" pod="openshift-multus/multus-4w9l7" podUID="0294dfed-64df-4d3c-92de-7a93787780a2" Jan 25 00:08:22 crc kubenswrapper[4985]: I0125 00:08:22.967725 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-mwbbt" podStartSLOduration=93.967702948 podStartE2EDuration="1m33.967702948s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:16.942505123 +0000 UTC m=+106.974441456" watchObservedRunningTime="2026-01-25 
00:08:22.967702948 +0000 UTC m=+112.999639261" Jan 25 00:08:23 crc kubenswrapper[4985]: I0125 00:08:23.273754 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:08:23 crc kubenswrapper[4985]: I0125 00:08:23.273764 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:08:23 crc kubenswrapper[4985]: E0125 00:08:23.273885 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:08:23 crc kubenswrapper[4985]: E0125 00:08:23.273971 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:08:23 crc kubenswrapper[4985]: I0125 00:08:23.273782 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:08:23 crc kubenswrapper[4985]: E0125 00:08:23.274031 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:08:23 crc kubenswrapper[4985]: I0125 00:08:23.952533 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4w9l7_0294dfed-64df-4d3c-92de-7a93787780a2/kube-multus/1.log" Jan 25 00:08:24 crc kubenswrapper[4985]: I0125 00:08:24.274747 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:08:24 crc kubenswrapper[4985]: E0125 00:08:24.274947 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:08:25 crc kubenswrapper[4985]: I0125 00:08:25.273867 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:08:25 crc kubenswrapper[4985]: E0125 00:08:25.275021 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:08:25 crc kubenswrapper[4985]: I0125 00:08:25.273927 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:08:25 crc kubenswrapper[4985]: E0125 00:08:25.275512 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:08:25 crc kubenswrapper[4985]: I0125 00:08:25.273884 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:08:25 crc kubenswrapper[4985]: E0125 00:08:25.275894 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:08:26 crc kubenswrapper[4985]: I0125 00:08:26.273945 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:08:26 crc kubenswrapper[4985]: E0125 00:08:26.274193 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:08:27 crc kubenswrapper[4985]: I0125 00:08:27.273895 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:08:27 crc kubenswrapper[4985]: I0125 00:08:27.273911 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:08:27 crc kubenswrapper[4985]: I0125 00:08:27.274092 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:08:27 crc kubenswrapper[4985]: E0125 00:08:27.274315 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:08:27 crc kubenswrapper[4985]: E0125 00:08:27.274972 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:08:27 crc kubenswrapper[4985]: E0125 00:08:27.275155 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:08:27 crc kubenswrapper[4985]: I0125 00:08:27.275479 4985 scope.go:117] "RemoveContainer" containerID="cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c" Jan 25 00:08:27 crc kubenswrapper[4985]: I0125 00:08:27.970325 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-cc28q_64cc3123-ba76-4365-86ae-c4cf7c09a805/ovnkube-controller/3.log" Jan 25 00:08:27 crc kubenswrapper[4985]: I0125 00:08:27.973663 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerStarted","Data":"a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057"} Jan 25 00:08:27 crc kubenswrapper[4985]: I0125 00:08:27.975029 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:08:28 crc kubenswrapper[4985]: I0125 00:08:28.018382 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" podStartSLOduration=99.018354959 podStartE2EDuration="1m39.018354959s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:28.014899968 +0000 UTC m=+118.046836281" watchObservedRunningTime="2026-01-25 00:08:28.018354959 +0000 UTC m=+118.050291272" Jan 25 00:08:28 crc kubenswrapper[4985]: I0125 00:08:28.274233 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:08:28 crc kubenswrapper[4985]: E0125 00:08:28.274384 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:08:28 crc kubenswrapper[4985]: I0125 00:08:28.329546 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-cqtvp"] Jan 25 00:08:28 crc kubenswrapper[4985]: I0125 00:08:28.977313 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:08:28 crc kubenswrapper[4985]: E0125 00:08:28.978368 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:08:29 crc kubenswrapper[4985]: I0125 00:08:29.273720 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:08:29 crc kubenswrapper[4985]: I0125 00:08:29.273831 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:08:29 crc kubenswrapper[4985]: E0125 00:08:29.273898 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:08:29 crc kubenswrapper[4985]: I0125 00:08:29.273934 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:08:29 crc kubenswrapper[4985]: E0125 00:08:29.274096 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:08:29 crc kubenswrapper[4985]: E0125 00:08:29.274203 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:08:30 crc kubenswrapper[4985]: I0125 00:08:30.273690 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:08:30 crc kubenswrapper[4985]: E0125 00:08:30.276064 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:08:30 crc kubenswrapper[4985]: E0125 00:08:30.284227 4985 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Jan 25 00:08:30 crc kubenswrapper[4985]: E0125 00:08:30.404083 4985 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 25 00:08:31 crc kubenswrapper[4985]: I0125 00:08:31.273947 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:08:31 crc kubenswrapper[4985]: I0125 00:08:31.274032 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:08:31 crc kubenswrapper[4985]: I0125 00:08:31.274063 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:08:31 crc kubenswrapper[4985]: E0125 00:08:31.274254 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:08:31 crc kubenswrapper[4985]: E0125 00:08:31.274562 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:08:31 crc kubenswrapper[4985]: E0125 00:08:31.274776 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:08:32 crc kubenswrapper[4985]: I0125 00:08:32.274204 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:08:32 crc kubenswrapper[4985]: E0125 00:08:32.274455 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:08:33 crc kubenswrapper[4985]: I0125 00:08:33.274020 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:08:33 crc kubenswrapper[4985]: I0125 00:08:33.274095 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:08:33 crc kubenswrapper[4985]: I0125 00:08:33.274034 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:08:33 crc kubenswrapper[4985]: E0125 00:08:33.274458 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:08:33 crc kubenswrapper[4985]: I0125 00:08:33.274670 4985 scope.go:117] "RemoveContainer" containerID="37f704152429d54471e85318a5e83ab5aa842441c44fc4e6615bb50d8fe2b03e" Jan 25 00:08:33 crc kubenswrapper[4985]: E0125 00:08:33.274712 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:08:33 crc kubenswrapper[4985]: E0125 00:08:33.274799 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:08:34 crc kubenswrapper[4985]: I0125 00:08:34.000750 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4w9l7_0294dfed-64df-4d3c-92de-7a93787780a2/kube-multus/1.log" Jan 25 00:08:34 crc kubenswrapper[4985]: I0125 00:08:34.001282 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4w9l7" event={"ID":"0294dfed-64df-4d3c-92de-7a93787780a2","Type":"ContainerStarted","Data":"70c28b2abf2e70c814a8c5ac83f93a3a6935c31e2d3b6f254ff98456404cb38b"} Jan 25 00:08:34 crc kubenswrapper[4985]: I0125 00:08:34.273937 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:08:34 crc kubenswrapper[4985]: E0125 00:08:34.274272 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:08:35 crc kubenswrapper[4985]: I0125 00:08:35.274548 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:08:35 crc kubenswrapper[4985]: I0125 00:08:35.274606 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:08:35 crc kubenswrapper[4985]: E0125 00:08:35.274660 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:08:35 crc kubenswrapper[4985]: I0125 00:08:35.274716 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:08:35 crc kubenswrapper[4985]: E0125 00:08:35.274798 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:08:35 crc kubenswrapper[4985]: E0125 00:08:35.275019 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:08:35 crc kubenswrapper[4985]: E0125 00:08:35.405760 4985 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 25 00:08:36 crc kubenswrapper[4985]: I0125 00:08:36.274730 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:08:36 crc kubenswrapper[4985]: E0125 00:08:36.276020 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:08:37 crc kubenswrapper[4985]: I0125 00:08:37.274387 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:08:37 crc kubenswrapper[4985]: I0125 00:08:37.274499 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:08:37 crc kubenswrapper[4985]: E0125 00:08:37.274561 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:08:37 crc kubenswrapper[4985]: E0125 00:08:37.274683 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:08:37 crc kubenswrapper[4985]: I0125 00:08:37.274387 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:08:37 crc kubenswrapper[4985]: E0125 00:08:37.274817 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:08:38 crc kubenswrapper[4985]: I0125 00:08:38.274303 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:08:38 crc kubenswrapper[4985]: E0125 00:08:38.274479 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:08:39 crc kubenswrapper[4985]: I0125 00:08:39.274530 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:08:39 crc kubenswrapper[4985]: I0125 00:08:39.274595 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:08:39 crc kubenswrapper[4985]: I0125 00:08:39.274656 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:08:39 crc kubenswrapper[4985]: E0125 00:08:39.274726 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 25 00:08:39 crc kubenswrapper[4985]: E0125 00:08:39.274861 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 25 00:08:39 crc kubenswrapper[4985]: E0125 00:08:39.274986 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 25 00:08:40 crc kubenswrapper[4985]: I0125 00:08:40.274003 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:08:40 crc kubenswrapper[4985]: E0125 00:08:40.275800 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-cqtvp" podUID="39723ce0-614f-4ada-9cc7-6efe79c7e51c" Jan 25 00:08:41 crc kubenswrapper[4985]: I0125 00:08:41.274221 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:08:41 crc kubenswrapper[4985]: I0125 00:08:41.274322 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:08:41 crc kubenswrapper[4985]: I0125 00:08:41.274253 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:08:41 crc kubenswrapper[4985]: I0125 00:08:41.277662 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 25 00:08:41 crc kubenswrapper[4985]: I0125 00:08:41.277714 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 25 00:08:41 crc kubenswrapper[4985]: I0125 00:08:41.277823 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 25 00:08:41 crc kubenswrapper[4985]: I0125 00:08:41.277991 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 25 00:08:42 crc kubenswrapper[4985]: I0125 00:08:42.274593 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:08:42 crc kubenswrapper[4985]: I0125 00:08:42.280157 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 25 00:08:42 crc kubenswrapper[4985]: I0125 00:08:42.280268 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.725974 4985 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.787188 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-ctsj5"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.788177 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.790131 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mntqm"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.790754 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.791934 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.792034 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.799349 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.800170 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.800847 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.801178 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.801898 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w7k95"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.802702 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w7k95" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.801931 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.802232 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.802287 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.802390 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.802435 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.802433 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.802541 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.802590 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.802804 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.818737 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.819094 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.819327 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.819436 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.819851 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9k44n"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.820504 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9k44n" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.821435 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-bnmp5"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.822558 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-bnmp5" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.822732 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-pruner-29488320-65m92"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.824206 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-29vvw"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.824408 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-pruner-29488320-65m92" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.824579 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.824835 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-dccnf"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.825289 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dccnf" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.825947 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-sffms"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.826442 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-sffms" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.828091 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jzhrb"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.828610 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jzhrb" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.830369 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jdgg6"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.830950 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jdgg6" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.833202 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-54sg5"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.833597 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-mp62x"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.834198 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mp62x" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.837695 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-bfzz6"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.838048 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-54sg5" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.838170 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.838822 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"pruner-dockercfg-p7bcw" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.839303 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.839523 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.839664 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.840190 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.844294 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.844499 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.844584 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.844619 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.844785 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.845731 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.845904 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"serviceca" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.846101 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.849307 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.854008 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.855392 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.856334 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 25 00:08:45 crc 
kubenswrapper[4985]: I0125 00:08:45.870289 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5j4gc"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.870590 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2dflz"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.870913 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2dflz" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.871470 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5j4gc" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.871574 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.871785 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.871466 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bfzz6" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.872270 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.872481 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.872498 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g9xw9"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.873170 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g9xw9" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.873586 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l8stl"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.874160 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.875632 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-t7bhx"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.876051 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-t7bhx" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.876350 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.876584 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.878040 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.878083 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.878733 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.879659 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/de88820c-7cff-4928-8f36-9ec785accadc-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-jzhrb\" (UID: \"de88820c-7cff-4928-8f36-9ec785accadc\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jzhrb" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.879693 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/de88820c-7cff-4928-8f36-9ec785accadc-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-jzhrb\" (UID: \"de88820c-7cff-4928-8f36-9ec785accadc\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jzhrb" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.879805 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9qchq\" (UniqueName: \"kubernetes.io/projected/de88820c-7cff-4928-8f36-9ec785accadc-kube-api-access-9qchq\") pod \"cluster-image-registry-operator-dc59b4c8b-jzhrb\" (UID: \"de88820c-7cff-4928-8f36-9ec785accadc\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jzhrb" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.879860 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/de88820c-7cff-4928-8f36-9ec785accadc-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-jzhrb\" (UID: \"de88820c-7cff-4928-8f36-9ec785accadc\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jzhrb" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.881991 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.884946 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-5z29b"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.885006 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.885148 4985 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.885281 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-54vvw"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.885521 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-znxjv"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.885898 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-znxjv" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.886271 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-5z29b" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.886454 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.892083 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.892342 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.892358 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.892405 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.892127 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.892468 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.892163 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.892194 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.892548 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.892579 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.892260 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.892627 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.892281 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.892668 4985 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.892669 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.892300 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.892587 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.892740 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.892851 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.892865 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.892912 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.892853 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.893007 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.893021 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.893206 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.893309 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.893443 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.893791 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.893930 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.894095 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.894209 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.894346 4985 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.894908 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.895068 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.896696 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.897091 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.897464 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.897783 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-g229p"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.898478 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-g229p" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.899486 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.899515 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.906517 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.909148 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.912391 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-mpnhl"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.912715 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.930171 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.930315 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.930599 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.932014 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.932077 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.932364 4985 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.932363 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.932469 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.932677 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.932786 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-mpnhl" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.933265 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.934804 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gp85q"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.935137 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.935460 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-764mr"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.935941 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9j92"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.936386 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9j92" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.936607 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gp85q" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.936736 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-zp4dh"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.937093 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-764mr" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.937266 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-zp4dh" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.938605 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.938909 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.940661 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.940843 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.940906 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.941809 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kz648"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.942252 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kz648" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.943407 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.943649 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29488320-8l78v"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.944007 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cjjtf"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.944154 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29488320-8l78v" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.945402 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-7gxv5"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.945610 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cjjtf" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.945873 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fwpcj"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.945921 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-7gxv5" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.946680 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.946976 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bwg9c"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.947527 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bwg9c" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.947643 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7tgz4"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.948066 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7tgz4" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.950714 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.950896 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-8ph92"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.951519 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-8ph92" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.955877 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-pnk89"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.956434 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-pnk89" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.957033 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9k44n"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.959179 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w7k95"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.959206 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mntqm"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.960591 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-ctsj5"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.961658 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jzhrb"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.964350 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-mp62x"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.965346 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5j4gc"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.966366 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-pruner-29488320-65m92"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.970300 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.971635 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-4vgrr"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.972492 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-etcd-operator/etcd-operator-b45778765-g229p"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.972527 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.973826 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-29vvw"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.985199 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l8stl"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.986709 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/b56b49d8-11f3-49bc-bad7-d24bd00f0589-plugins-dir\") pod \"csi-hostpathplugin-4vgrr\" (UID: \"b56b49d8-11f3-49bc-bad7-d24bd00f0589\") " pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.986778 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vhfc\" (UniqueName: \"kubernetes.io/projected/72c63de9-5d4f-4037-b70e-11ddf9a4904c-kube-api-access-4vhfc\") pod \"control-plane-machine-set-operator-78cbb6b69f-7tgz4\" (UID: \"72c63de9-5d4f-4037-b70e-11ddf9a4904c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7tgz4" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.986847 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7x89\" (UniqueName: \"kubernetes.io/projected/d46a176e-d2cd-41cc-8420-37762bc47cd3-kube-api-access-z7x89\") pod \"ingress-operator-5b745b69d9-mp62x\" (UID: \"d46a176e-d2cd-41cc-8420-37762bc47cd3\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mp62x" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.986885 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fchn\" (UniqueName: \"kubernetes.io/projected/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-kube-api-access-5fchn\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.986931 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/cdc7acca-bf54-44d2-986b-10ecfb1a0abd-serviceca\") pod \"image-pruner-29488320-65m92\" (UID: \"cdc7acca-bf54-44d2-986b-10ecfb1a0abd\") " pod="openshift-image-registry/image-pruner-29488320-65m92" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.986975 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e0fc4ac9-4ec8-4651-bd15-c55bbf199299-config\") pod \"console-operator-58897d9998-sffms\" (UID: \"e0fc4ac9-4ec8-4651-bd15-c55bbf199299\") " pod="openshift-console-operator/console-operator-58897d9998-sffms" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.987020 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c5zwc\" (UniqueName: \"kubernetes.io/projected/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-kube-api-access-c5zwc\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: 
\"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.987060 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnhmp\" (UniqueName: \"kubernetes.io/projected/b82d04cc-00d3-43dc-8317-dacb594c8b61-kube-api-access-pnhmp\") pod \"migrator-59844c95c7-bfzz6\" (UID: \"b82d04cc-00d3-43dc-8317-dacb594c8b61\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bfzz6" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.987208 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/5dd173ed-ab63-4006-9e39-2f4abf301a8e-console-serving-cert\") pod \"console-f9d7485db-54vvw\" (UID: \"5dd173ed-ab63-4006-9e39-2f4abf301a8e\") " pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.987253 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.987287 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nw6k9\" (UniqueName: \"kubernetes.io/projected/beb34140-c131-478f-94d6-c4b5433b58e9-kube-api-access-nw6k9\") pod \"machine-approver-56656f9798-znxjv\" (UID: \"beb34140-c131-478f-94d6-c4b5433b58e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-znxjv" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.987438 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.987548 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.988056 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/234cec4e-fc7e-4a34-b638-f1cc49fb2299-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-mntqm\" (UID: \"234cec4e-fc7e-4a34-b638-f1cc49fb2299\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.988210 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71626fcf-108c-42c4-95da-d634b73f587f-config\") pod 
\"service-ca-operator-777779d784-7gxv5\" (UID: \"71626fcf-108c-42c4-95da-d634b73f587f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7gxv5" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.988270 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4ee6bec9-a188-48bb-b49b-eeae08e55158-serving-cert\") pod \"apiserver-7bbb656c7d-jql78\" (UID: \"4ee6bec9-a188-48bb-b49b-eeae08e55158\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.988293 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4ee6bec9-a188-48bb-b49b-eeae08e55158-audit-policies\") pod \"apiserver-7bbb656c7d-jql78\" (UID: \"4ee6bec9-a188-48bb-b49b-eeae08e55158\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.988322 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/b1f335db-7e31-44a9-b113-bb546349caa7-etcd-ca\") pod \"etcd-operator-b45778765-g229p\" (UID: \"b1f335db-7e31-44a9-b113-bb546349caa7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g229p" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.988347 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/b56b49d8-11f3-49bc-bad7-d24bd00f0589-socket-dir\") pod \"csi-hostpathplugin-4vgrr\" (UID: \"b56b49d8-11f3-49bc-bad7-d24bd00f0589\") " pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.988383 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/de88820c-7cff-4928-8f36-9ec785accadc-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-jzhrb\" (UID: \"de88820c-7cff-4928-8f36-9ec785accadc\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jzhrb" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.988416 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-htvrt\" (UniqueName: \"kubernetes.io/projected/234cec4e-fc7e-4a34-b638-f1cc49fb2299-kube-api-access-htvrt\") pod \"controller-manager-879f6c89f-mntqm\" (UID: \"234cec4e-fc7e-4a34-b638-f1cc49fb2299\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.988442 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/72c63de9-5d4f-4037-b70e-11ddf9a4904c-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-7tgz4\" (UID: \"72c63de9-5d4f-4037-b70e-11ddf9a4904c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7tgz4" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.988489 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-serving-cert\") pod \"apiserver-76f77b778f-ctsj5\" (UID: 
\"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.988515 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a8c49802-43de-4e97-8067-4824c3312194-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-jdgg6\" (UID: \"a8c49802-43de-4e97-8067-4824c3312194\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jdgg6" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.988535 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-node-pullsecrets\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.988562 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.988589 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/3fa96974-2f91-4b24-b80d-4b221107adbe-available-featuregates\") pod \"openshift-config-operator-7777fb866f-dccnf\" (UID: \"3fa96974-2f91-4b24-b80d-4b221107adbe\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dccnf" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.988606 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4ee6bec9-a188-48bb-b49b-eeae08e55158-audit-dir\") pod \"apiserver-7bbb656c7d-jql78\" (UID: \"4ee6bec9-a188-48bb-b49b-eeae08e55158\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.988707 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3fa96974-2f91-4b24-b80d-4b221107adbe-serving-cert\") pod \"openshift-config-operator-7777fb866f-dccnf\" (UID: \"3fa96974-2f91-4b24-b80d-4b221107adbe\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dccnf" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.988761 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvrd6\" (UniqueName: \"kubernetes.io/projected/b56b49d8-11f3-49bc-bad7-d24bd00f0589-kube-api-access-nvrd6\") pod \"csi-hostpathplugin-4vgrr\" (UID: \"b56b49d8-11f3-49bc-bad7-d24bd00f0589\") " pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.989126 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/de88820c-7cff-4928-8f36-9ec785accadc-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-jzhrb\" (UID: \"de88820c-7cff-4928-8f36-9ec785accadc\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jzhrb" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.989155 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-etcd-client\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.989250 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/b56b49d8-11f3-49bc-bad7-d24bd00f0589-registration-dir\") pod \"csi-hostpathplugin-4vgrr\" (UID: \"b56b49d8-11f3-49bc-bad7-d24bd00f0589\") " pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.989279 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/25a97396-e69d-40d6-8734-95b5aaec338f-auth-proxy-config\") pod \"machine-config-operator-74547568cd-8ph92\" (UID: \"25a97396-e69d-40d6-8734-95b5aaec338f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-8ph92" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.989302 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d46a176e-d2cd-41cc-8420-37762bc47cd3-metrics-tls\") pod \"ingress-operator-5b745b69d9-mp62x\" (UID: \"d46a176e-d2cd-41cc-8420-37762bc47cd3\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mp62x" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.989371 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mk4hg\" (UniqueName: \"kubernetes.io/projected/cdc7acca-bf54-44d2-986b-10ecfb1a0abd-kube-api-access-mk4hg\") pod \"image-pruner-29488320-65m92\" (UID: \"cdc7acca-bf54-44d2-986b-10ecfb1a0abd\") " pod="openshift-image-registry/image-pruner-29488320-65m92" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.989390 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/5dd173ed-ab63-4006-9e39-2f4abf301a8e-oauth-serving-cert\") pod \"console-f9d7485db-54vvw\" (UID: \"5dd173ed-ab63-4006-9e39-2f4abf301a8e\") " pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.989454 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/cbf25816-bff6-42fc-8e43-513b490e830b-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-w7k95\" (UID: \"cbf25816-bff6-42fc-8e43-513b490e830b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w7k95" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.989482 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a8c49802-43de-4e97-8067-4824c3312194-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-jdgg6\" (UID: \"a8c49802-43de-4e97-8067-4824c3312194\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jdgg6" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.989509 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/585479e7-a937-42f2-9802-2117e25c68c1-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-5j4gc\" (UID: \"585479e7-a937-42f2-9802-2117e25c68c1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5j4gc" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.989556 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtw5w\" (UniqueName: \"kubernetes.io/projected/4ee6bec9-a188-48bb-b49b-eeae08e55158-kube-api-access-rtw5w\") pod \"apiserver-7bbb656c7d-jql78\" (UID: \"4ee6bec9-a188-48bb-b49b-eeae08e55158\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.989577 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/de88820c-7cff-4928-8f36-9ec785accadc-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-jzhrb\" (UID: \"de88820c-7cff-4928-8f36-9ec785accadc\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jzhrb" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.989604 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/234cec4e-fc7e-4a34-b638-f1cc49fb2299-config\") pod \"controller-manager-879f6c89f-mntqm\" (UID: \"234cec4e-fc7e-4a34-b638-f1cc49fb2299\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.989624 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/b56b49d8-11f3-49bc-bad7-d24bd00f0589-csi-data-dir\") pod \"csi-hostpathplugin-4vgrr\" (UID: \"b56b49d8-11f3-49bc-bad7-d24bd00f0589\") " pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.989650 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4ee6bec9-a188-48bb-b49b-eeae08e55158-etcd-client\") pod \"apiserver-7bbb656c7d-jql78\" (UID: \"4ee6bec9-a188-48bb-b49b-eeae08e55158\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.989740 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8c49802-43de-4e97-8067-4824c3312194-config\") pod \"kube-apiserver-operator-766d6c64bb-jdgg6\" (UID: \"a8c49802-43de-4e97-8067-4824c3312194\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jdgg6" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.989621 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-bnmp5"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.990352 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/47fe0371-d4d4-40e2-905f-5e26db186cbe-srv-cert\") 
pod \"catalog-operator-68c6474976-2dflz\" (UID: \"47fe0371-d4d4-40e2-905f-5e26db186cbe\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2dflz" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.990509 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/234cec4e-fc7e-4a34-b638-f1cc49fb2299-client-ca\") pod \"controller-manager-879f6c89f-mntqm\" (UID: \"234cec4e-fc7e-4a34-b638-f1cc49fb2299\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.990799 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e1ea9185-aa51-4b82-98ed-b2f028d291b2-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-fwpcj\" (UID: \"e1ea9185-aa51-4b82-98ed-b2f028d291b2\") " pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.990827 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9pqdh\" (UniqueName: \"kubernetes.io/projected/97676058-3567-4d0a-b8da-ad5890e39080-kube-api-access-9pqdh\") pod \"openshift-controller-manager-operator-756b6f6bc6-gp85q\" (UID: \"97676058-3567-4d0a-b8da-ad5890e39080\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gp85q" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.990976 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-trusted-ca-bundle\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.991071 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdqzs\" (UniqueName: \"kubernetes.io/projected/e0fc4ac9-4ec8-4651-bd15-c55bbf199299-kube-api-access-vdqzs\") pod \"console-operator-58897d9998-sffms\" (UID: \"e0fc4ac9-4ec8-4651-bd15-c55bbf199299\") " pod="openshift-console-operator/console-operator-58897d9998-sffms" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.991112 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-audit\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.991138 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.991341 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.991371 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.991405 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-image-import-ca\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.991434 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqj6b\" (UniqueName: \"kubernetes.io/projected/47fe0371-d4d4-40e2-905f-5e26db186cbe-kube-api-access-tqj6b\") pod \"catalog-operator-68c6474976-2dflz\" (UID: \"47fe0371-d4d4-40e2-905f-5e26db186cbe\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2dflz" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.991600 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/97676058-3567-4d0a-b8da-ad5890e39080-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-gp85q\" (UID: \"97676058-3567-4d0a-b8da-ad5890e39080\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gp85q" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.991753 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/5dd173ed-ab63-4006-9e39-2f4abf301a8e-console-oauth-config\") pod \"console-f9d7485db-54vvw\" (UID: \"5dd173ed-ab63-4006-9e39-2f4abf301a8e\") " pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.991811 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sk66t\" (UniqueName: \"kubernetes.io/projected/e1ea9185-aa51-4b82-98ed-b2f028d291b2-kube-api-access-sk66t\") pod \"marketplace-operator-79b997595-fwpcj\" (UID: \"e1ea9185-aa51-4b82-98ed-b2f028d291b2\") " pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.991824 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.991841 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/5dd173ed-ab63-4006-9e39-2f4abf301a8e-console-config\") pod \"console-f9d7485db-54vvw\" (UID: \"5dd173ed-ab63-4006-9e39-2f4abf301a8e\") " pod="openshift-console/console-f9d7485db-54vvw" Jan 25 
00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.991855 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/de88820c-7cff-4928-8f36-9ec785accadc-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-jzhrb\" (UID: \"de88820c-7cff-4928-8f36-9ec785accadc\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jzhrb" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.991887 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/16f872f6-c454-452f-adf9-bee0a76ebe2b-profile-collector-cert\") pod \"olm-operator-6b444d44fb-g9xw9\" (UID: \"16f872f6-c454-452f-adf9-bee0a76ebe2b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g9xw9" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.991918 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.991966 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jl5zg\" (UniqueName: \"kubernetes.io/projected/16f872f6-c454-452f-adf9-bee0a76ebe2b-kube-api-access-jl5zg\") pod \"olm-operator-6b444d44fb-g9xw9\" (UID: \"16f872f6-c454-452f-adf9-bee0a76ebe2b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g9xw9" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.991989 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/beb34140-c131-478f-94d6-c4b5433b58e9-config\") pod \"machine-approver-56656f9798-znxjv\" (UID: \"beb34140-c131-478f-94d6-c4b5433b58e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-znxjv" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.992016 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhtl5\" (UniqueName: \"kubernetes.io/projected/25a97396-e69d-40d6-8734-95b5aaec338f-kube-api-access-lhtl5\") pod \"machine-config-operator-74547568cd-8ph92\" (UID: \"25a97396-e69d-40d6-8734-95b5aaec338f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-8ph92" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.992064 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/71626fcf-108c-42c4-95da-d634b73f587f-serving-cert\") pod \"service-ca-operator-777779d784-7gxv5\" (UID: \"71626fcf-108c-42c4-95da-d634b73f587f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7gxv5" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.992163 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/677a7eeb-960f-4771-bd2f-9fedef723ffd-config-volume\") pod \"collect-profiles-29488320-8l78v\" (UID: \"677a7eeb-960f-4771-bd2f-9fedef723ffd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488320-8l78v" 
Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.992218 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brg6q\" (UniqueName: \"kubernetes.io/projected/677a7eeb-960f-4771-bd2f-9fedef723ffd-kube-api-access-brg6q\") pod \"collect-profiles-29488320-8l78v\" (UID: \"677a7eeb-960f-4771-bd2f-9fedef723ffd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488320-8l78v" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.992335 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-audit-dir\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.992411 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.992443 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e0fc4ac9-4ec8-4651-bd15-c55bbf199299-serving-cert\") pod \"console-operator-58897d9998-sffms\" (UID: \"e0fc4ac9-4ec8-4651-bd15-c55bbf199299\") " pod="openshift-console-operator/console-operator-58897d9998-sffms" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.992520 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nhr7l\" (UniqueName: \"kubernetes.io/projected/ae20ca57-847f-4344-9718-aa179543b4ae-kube-api-access-nhr7l\") pod \"downloads-7954f5f757-54sg5\" (UID: \"ae20ca57-847f-4344-9718-aa179543b4ae\") " pod="openshift-console/downloads-7954f5f757-54sg5" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.992585 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d46a176e-d2cd-41cc-8420-37762bc47cd3-trusted-ca\") pod \"ingress-operator-5b745b69d9-mp62x\" (UID: \"d46a176e-d2cd-41cc-8420-37762bc47cd3\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mp62x" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.992702 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/234cec4e-fc7e-4a34-b638-f1cc49fb2299-serving-cert\") pod \"controller-manager-879f6c89f-mntqm\" (UID: \"234cec4e-fc7e-4a34-b638-f1cc49fb2299\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.992739 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/585479e7-a937-42f2-9802-2117e25c68c1-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-5j4gc\" (UID: \"585479e7-a937-42f2-9802-2117e25c68c1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5j4gc" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.992762 
4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22qr6\" (UniqueName: \"kubernetes.io/projected/5dd173ed-ab63-4006-9e39-2f4abf301a8e-kube-api-access-22qr6\") pod \"console-f9d7485db-54vvw\" (UID: \"5dd173ed-ab63-4006-9e39-2f4abf301a8e\") " pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.992821 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-audit-policies\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.992938 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d46a176e-d2cd-41cc-8420-37762bc47cd3-bound-sa-token\") pod \"ingress-operator-5b745b69d9-mp62x\" (UID: \"d46a176e-d2cd-41cc-8420-37762bc47cd3\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mp62x" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.993128 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwf89\" (UniqueName: \"kubernetes.io/projected/3fa96974-2f91-4b24-b80d-4b221107adbe-kube-api-access-xwf89\") pod \"openshift-config-operator-7777fb866f-dccnf\" (UID: \"3fa96974-2f91-4b24-b80d-4b221107adbe\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dccnf" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.993298 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/677a7eeb-960f-4771-bd2f-9fedef723ffd-secret-volume\") pod \"collect-profiles-29488320-8l78v\" (UID: \"677a7eeb-960f-4771-bd2f-9fedef723ffd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488320-8l78v" Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.993321 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc"] Jan 25 00:08:45 crc kubenswrapper[4985]: I0125 00:08:45.993370 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29hwm\" (UniqueName: \"kubernetes.io/projected/71626fcf-108c-42c4-95da-d634b73f587f-kube-api-access-29hwm\") pod \"service-ca-operator-777779d784-7gxv5\" (UID: \"71626fcf-108c-42c4-95da-d634b73f587f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7gxv5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:45.996824 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/de88820c-7cff-4928-8f36-9ec785accadc-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-jzhrb\" (UID: \"de88820c-7cff-4928-8f36-9ec785accadc\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jzhrb" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:45.999223 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6tjcw\" (UniqueName: \"kubernetes.io/projected/e74fd6cc-f34d-41c4-8d01-0f556277340d-kube-api-access-6tjcw\") pod 
\"machine-api-operator-5694c8668f-bnmp5\" (UID: \"e74fd6cc-f34d-41c4-8d01-0f556277340d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bnmp5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:45.999260 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97676058-3567-4d0a-b8da-ad5890e39080-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-gp85q\" (UID: \"97676058-3567-4d0a-b8da-ad5890e39080\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gp85q" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:45.999285 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:45.999395 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/47fe0371-d4d4-40e2-905f-5e26db186cbe-profile-collector-cert\") pod \"catalog-operator-68c6474976-2dflz\" (UID: \"47fe0371-d4d4-40e2-905f-5e26db186cbe\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2dflz" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.000209 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e1ea9185-aa51-4b82-98ed-b2f028d291b2-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-fwpcj\" (UID: \"e1ea9185-aa51-4b82-98ed-b2f028d291b2\") " pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.000259 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-config\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.000853 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-audit-dir\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.000903 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/585479e7-a937-42f2-9802-2117e25c68c1-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-5j4gc\" (UID: \"585479e7-a937-42f2-9802-2117e25c68c1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5j4gc" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.000971 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e74fd6cc-f34d-41c4-8d01-0f556277340d-images\") 
pod \"machine-api-operator-5694c8668f-bnmp5\" (UID: \"e74fd6cc-f34d-41c4-8d01-0f556277340d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bnmp5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.000987 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5dd173ed-ab63-4006-9e39-2f4abf301a8e-trusted-ca-bundle\") pod \"console-f9d7485db-54vvw\" (UID: \"5dd173ed-ab63-4006-9e39-2f4abf301a8e\") " pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.001006 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4gjfm\" (UniqueName: \"kubernetes.io/projected/cbf25816-bff6-42fc-8e43-513b490e830b-kube-api-access-4gjfm\") pod \"cluster-samples-operator-665b6dd947-w7k95\" (UID: \"cbf25816-bff6-42fc-8e43-513b490e830b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w7k95" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.001045 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b1f335db-7e31-44a9-b113-bb546349caa7-etcd-client\") pod \"etcd-operator-b45778765-g229p\" (UID: \"b1f335db-7e31-44a9-b113-bb546349caa7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g229p" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.001063 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1f335db-7e31-44a9-b113-bb546349caa7-config\") pod \"etcd-operator-b45778765-g229p\" (UID: \"b1f335db-7e31-44a9-b113-bb546349caa7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g229p" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.001091 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/067ed730-bfcc-4d6e-84d4-28c57fa90343-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-9k44n\" (UID: \"067ed730-bfcc-4d6e-84d4-28c57fa90343\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9k44n" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.001122 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/e74fd6cc-f34d-41c4-8d01-0f556277340d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-bnmp5\" (UID: \"e74fd6cc-f34d-41c4-8d01-0f556277340d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bnmp5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.001142 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/b56b49d8-11f3-49bc-bad7-d24bd00f0589-mountpoint-dir\") pod \"csi-hostpathplugin-4vgrr\" (UID: \"b56b49d8-11f3-49bc-bad7-d24bd00f0589\") " pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.001184 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5dd173ed-ab63-4006-9e39-2f4abf301a8e-service-ca\") pod \"console-f9d7485db-54vvw\" (UID: \"5dd173ed-ab63-4006-9e39-2f4abf301a8e\") " 
pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.001523 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gp85q"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.001600 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4ee6bec9-a188-48bb-b49b-eeae08e55158-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-jql78\" (UID: \"4ee6bec9-a188-48bb-b49b-eeae08e55158\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.001630 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4ee6bec9-a188-48bb-b49b-eeae08e55158-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-jql78\" (UID: \"4ee6bec9-a188-48bb-b49b-eeae08e55158\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.001675 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e74fd6cc-f34d-41c4-8d01-0f556277340d-config\") pod \"machine-api-operator-5694c8668f-bnmp5\" (UID: \"e74fd6cc-f34d-41c4-8d01-0f556277340d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bnmp5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.001699 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b1f335db-7e31-44a9-b113-bb546349caa7-serving-cert\") pod \"etcd-operator-b45778765-g229p\" (UID: \"b1f335db-7e31-44a9-b113-bb546349caa7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g229p" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.001718 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4ee6bec9-a188-48bb-b49b-eeae08e55158-encryption-config\") pod \"apiserver-7bbb656c7d-jql78\" (UID: \"4ee6bec9-a188-48bb-b49b-eeae08e55158\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.001736 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/b1f335db-7e31-44a9-b113-bb546349caa7-etcd-service-ca\") pod \"etcd-operator-b45778765-g229p\" (UID: \"b1f335db-7e31-44a9-b113-bb546349caa7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g229p" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.001759 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/25a97396-e69d-40d6-8734-95b5aaec338f-images\") pod \"machine-config-operator-74547568cd-8ph92\" (UID: \"25a97396-e69d-40d6-8734-95b5aaec338f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-8ph92" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.001789 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/beb34140-c131-478f-94d6-c4b5433b58e9-machine-approver-tls\") pod 
\"machine-approver-56656f9798-znxjv\" (UID: \"beb34140-c131-478f-94d6-c4b5433b58e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-znxjv" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.001823 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/067ed730-bfcc-4d6e-84d4-28c57fa90343-config\") pod \"openshift-apiserver-operator-796bbdcf4f-9k44n\" (UID: \"067ed730-bfcc-4d6e-84d4-28c57fa90343\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9k44n" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.001854 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/25a97396-e69d-40d6-8734-95b5aaec338f-proxy-tls\") pod \"machine-config-operator-74547568cd-8ph92\" (UID: \"25a97396-e69d-40d6-8734-95b5aaec338f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-8ph92" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.001902 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-encryption-config\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.001941 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87lcv\" (UniqueName: \"kubernetes.io/projected/067ed730-bfcc-4d6e-84d4-28c57fa90343-kube-api-access-87lcv\") pod \"openshift-apiserver-operator-796bbdcf4f-9k44n\" (UID: \"067ed730-bfcc-4d6e-84d4-28c57fa90343\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9k44n" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.001987 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.002016 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9qchq\" (UniqueName: \"kubernetes.io/projected/de88820c-7cff-4928-8f36-9ec785accadc-kube-api-access-9qchq\") pod \"cluster-image-registry-operator-dc59b4c8b-jzhrb\" (UID: \"de88820c-7cff-4928-8f36-9ec785accadc\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jzhrb" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.003076 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-etcd-serving-ca\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.004047 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9j92"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 
00:08:46.007247 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jdgg6"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.008277 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-bfzz6"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.009995 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.010182 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-mpnhl"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.012651 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e0fc4ac9-4ec8-4651-bd15-c55bbf199299-trusted-ca\") pod \"console-operator-58897d9998-sffms\" (UID: \"e0fc4ac9-4ec8-4651-bd15-c55bbf199299\") " pod="openshift-console-operator/console-operator-58897d9998-sffms" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.012747 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/beb34140-c131-478f-94d6-c4b5433b58e9-auth-proxy-config\") pod \"machine-approver-56656f9798-znxjv\" (UID: \"beb34140-c131-478f-94d6-c4b5433b58e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-znxjv" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.012773 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/16f872f6-c454-452f-adf9-bee0a76ebe2b-srv-cert\") pod \"olm-operator-6b444d44fb-g9xw9\" (UID: \"16f872f6-c454-452f-adf9-bee0a76ebe2b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g9xw9" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.012807 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sqjg9\" (UniqueName: \"kubernetes.io/projected/b1f335db-7e31-44a9-b113-bb546349caa7-kube-api-access-sqjg9\") pod \"etcd-operator-b45778765-g229p\" (UID: \"b1f335db-7e31-44a9-b113-bb546349caa7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g229p" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.013024 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-sffms"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.013063 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g9xw9"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.017327 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-dccnf"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.018843 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.019383 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-5j99m"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.020231 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-5j99m" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.021094 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-g4x9b"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.021736 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-g4x9b" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.022048 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kz648"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.023935 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-7gxv5"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.025634 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2dflz"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.027221 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-zp4dh"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.029709 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-764mr"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.030296 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.031215 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29488320-8l78v"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.032844 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-54sg5"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.034354 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cjjtf"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.037208 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-t7bhx"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.039123 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7tgz4"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.040664 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-54vvw"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.041773 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-pnk89"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.042818 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-g4x9b"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.044138 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-4vgrr"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.045217 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-8ph92"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.046283 4985 kubelet.go:2428] 
"SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fwpcj"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.047317 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bwg9c"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.048515 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-dflg7"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.049513 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.050904 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-dflg7"] Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.051032 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-dflg7" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.070372 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.090159 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.110691 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.113480 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/b56b49d8-11f3-49bc-bad7-d24bd00f0589-socket-dir\") pod \"csi-hostpathplugin-4vgrr\" (UID: \"b56b49d8-11f3-49bc-bad7-d24bd00f0589\") " pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.113513 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-htvrt\" (UniqueName: \"kubernetes.io/projected/234cec4e-fc7e-4a34-b638-f1cc49fb2299-kube-api-access-htvrt\") pod \"controller-manager-879f6c89f-mntqm\" (UID: \"234cec4e-fc7e-4a34-b638-f1cc49fb2299\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.113535 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/72c63de9-5d4f-4037-b70e-11ddf9a4904c-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-7tgz4\" (UID: \"72c63de9-5d4f-4037-b70e-11ddf9a4904c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7tgz4" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.113554 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-serving-cert\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.113571 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a8c49802-43de-4e97-8067-4824c3312194-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-jdgg6\" (UID: \"a8c49802-43de-4e97-8067-4824c3312194\") " 
pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jdgg6" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.113586 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-node-pullsecrets\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.113601 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.113617 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/3fa96974-2f91-4b24-b80d-4b221107adbe-available-featuregates\") pod \"openshift-config-operator-7777fb866f-dccnf\" (UID: \"3fa96974-2f91-4b24-b80d-4b221107adbe\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dccnf" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.113632 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3fa96974-2f91-4b24-b80d-4b221107adbe-serving-cert\") pod \"openshift-config-operator-7777fb866f-dccnf\" (UID: \"3fa96974-2f91-4b24-b80d-4b221107adbe\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dccnf" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.113650 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4ee6bec9-a188-48bb-b49b-eeae08e55158-audit-dir\") pod \"apiserver-7bbb656c7d-jql78\" (UID: \"4ee6bec9-a188-48bb-b49b-eeae08e55158\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.113664 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvrd6\" (UniqueName: \"kubernetes.io/projected/b56b49d8-11f3-49bc-bad7-d24bd00f0589-kube-api-access-nvrd6\") pod \"csi-hostpathplugin-4vgrr\" (UID: \"b56b49d8-11f3-49bc-bad7-d24bd00f0589\") " pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.113693 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-etcd-client\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.113709 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/b56b49d8-11f3-49bc-bad7-d24bd00f0589-registration-dir\") pod \"csi-hostpathplugin-4vgrr\" (UID: \"b56b49d8-11f3-49bc-bad7-d24bd00f0589\") " pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.113730 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: 
\"kubernetes.io/configmap/25a97396-e69d-40d6-8734-95b5aaec338f-auth-proxy-config\") pod \"machine-config-operator-74547568cd-8ph92\" (UID: \"25a97396-e69d-40d6-8734-95b5aaec338f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-8ph92" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.113774 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d46a176e-d2cd-41cc-8420-37762bc47cd3-metrics-tls\") pod \"ingress-operator-5b745b69d9-mp62x\" (UID: \"d46a176e-d2cd-41cc-8420-37762bc47cd3\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mp62x" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.113793 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/5dd173ed-ab63-4006-9e39-2f4abf301a8e-oauth-serving-cert\") pod \"console-f9d7485db-54vvw\" (UID: \"5dd173ed-ab63-4006-9e39-2f4abf301a8e\") " pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.113819 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mk4hg\" (UniqueName: \"kubernetes.io/projected/cdc7acca-bf54-44d2-986b-10ecfb1a0abd-kube-api-access-mk4hg\") pod \"image-pruner-29488320-65m92\" (UID: \"cdc7acca-bf54-44d2-986b-10ecfb1a0abd\") " pod="openshift-image-registry/image-pruner-29488320-65m92" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.113836 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/cbf25816-bff6-42fc-8e43-513b490e830b-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-w7k95\" (UID: \"cbf25816-bff6-42fc-8e43-513b490e830b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w7k95" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.113853 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a8c49802-43de-4e97-8067-4824c3312194-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-jdgg6\" (UID: \"a8c49802-43de-4e97-8067-4824c3312194\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jdgg6" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.113844 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-node-pullsecrets\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.113871 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/585479e7-a937-42f2-9802-2117e25c68c1-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-5j4gc\" (UID: \"585479e7-a937-42f2-9802-2117e25c68c1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5j4gc" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.113918 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtw5w\" (UniqueName: \"kubernetes.io/projected/4ee6bec9-a188-48bb-b49b-eeae08e55158-kube-api-access-rtw5w\") pod \"apiserver-7bbb656c7d-jql78\" (UID: 
\"4ee6bec9-a188-48bb-b49b-eeae08e55158\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.113939 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/234cec4e-fc7e-4a34-b638-f1cc49fb2299-config\") pod \"controller-manager-879f6c89f-mntqm\" (UID: \"234cec4e-fc7e-4a34-b638-f1cc49fb2299\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.113946 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4ee6bec9-a188-48bb-b49b-eeae08e55158-audit-dir\") pod \"apiserver-7bbb656c7d-jql78\" (UID: \"4ee6bec9-a188-48bb-b49b-eeae08e55158\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.113955 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/b56b49d8-11f3-49bc-bad7-d24bd00f0589-csi-data-dir\") pod \"csi-hostpathplugin-4vgrr\" (UID: \"b56b49d8-11f3-49bc-bad7-d24bd00f0589\") " pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114022 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/b56b49d8-11f3-49bc-bad7-d24bd00f0589-csi-data-dir\") pod \"csi-hostpathplugin-4vgrr\" (UID: \"b56b49d8-11f3-49bc-bad7-d24bd00f0589\") " pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114025 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4ee6bec9-a188-48bb-b49b-eeae08e55158-etcd-client\") pod \"apiserver-7bbb656c7d-jql78\" (UID: \"4ee6bec9-a188-48bb-b49b-eeae08e55158\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114061 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/234cec4e-fc7e-4a34-b638-f1cc49fb2299-client-ca\") pod \"controller-manager-879f6c89f-mntqm\" (UID: \"234cec4e-fc7e-4a34-b638-f1cc49fb2299\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114093 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8c49802-43de-4e97-8067-4824c3312194-config\") pod \"kube-apiserver-operator-766d6c64bb-jdgg6\" (UID: \"a8c49802-43de-4e97-8067-4824c3312194\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jdgg6" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114151 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/47fe0371-d4d4-40e2-905f-5e26db186cbe-srv-cert\") pod \"catalog-operator-68c6474976-2dflz\" (UID: \"47fe0371-d4d4-40e2-905f-5e26db186cbe\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2dflz" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114181 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/b56b49d8-11f3-49bc-bad7-d24bd00f0589-socket-dir\") pod 
\"csi-hostpathplugin-4vgrr\" (UID: \"b56b49d8-11f3-49bc-bad7-d24bd00f0589\") " pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114243 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/b56b49d8-11f3-49bc-bad7-d24bd00f0589-registration-dir\") pod \"csi-hostpathplugin-4vgrr\" (UID: \"b56b49d8-11f3-49bc-bad7-d24bd00f0589\") " pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114187 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e1ea9185-aa51-4b82-98ed-b2f028d291b2-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-fwpcj\" (UID: \"e1ea9185-aa51-4b82-98ed-b2f028d291b2\") " pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114346 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9pqdh\" (UniqueName: \"kubernetes.io/projected/97676058-3567-4d0a-b8da-ad5890e39080-kube-api-access-9pqdh\") pod \"openshift-controller-manager-operator-756b6f6bc6-gp85q\" (UID: \"97676058-3567-4d0a-b8da-ad5890e39080\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gp85q" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114384 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-trusted-ca-bundle\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114412 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdqzs\" (UniqueName: \"kubernetes.io/projected/e0fc4ac9-4ec8-4651-bd15-c55bbf199299-kube-api-access-vdqzs\") pod \"console-operator-58897d9998-sffms\" (UID: \"e0fc4ac9-4ec8-4651-bd15-c55bbf199299\") " pod="openshift-console-operator/console-operator-58897d9998-sffms" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114437 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-audit\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114455 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/3fa96974-2f91-4b24-b80d-4b221107adbe-available-featuregates\") pod \"openshift-config-operator-7777fb866f-dccnf\" (UID: \"3fa96974-2f91-4b24-b80d-4b221107adbe\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dccnf" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114464 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc 
kubenswrapper[4985]: I0125 00:08:46.114586 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114626 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114664 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-image-import-ca\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114701 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqj6b\" (UniqueName: \"kubernetes.io/projected/47fe0371-d4d4-40e2-905f-5e26db186cbe-kube-api-access-tqj6b\") pod \"catalog-operator-68c6474976-2dflz\" (UID: \"47fe0371-d4d4-40e2-905f-5e26db186cbe\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2dflz" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114733 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/97676058-3567-4d0a-b8da-ad5890e39080-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-gp85q\" (UID: \"97676058-3567-4d0a-b8da-ad5890e39080\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gp85q" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114763 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/5dd173ed-ab63-4006-9e39-2f4abf301a8e-console-oauth-config\") pod \"console-f9d7485db-54vvw\" (UID: \"5dd173ed-ab63-4006-9e39-2f4abf301a8e\") " pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114795 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sk66t\" (UniqueName: \"kubernetes.io/projected/e1ea9185-aa51-4b82-98ed-b2f028d291b2-kube-api-access-sk66t\") pod \"marketplace-operator-79b997595-fwpcj\" (UID: \"e1ea9185-aa51-4b82-98ed-b2f028d291b2\") " pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114828 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/16f872f6-c454-452f-adf9-bee0a76ebe2b-profile-collector-cert\") pod \"olm-operator-6b444d44fb-g9xw9\" (UID: \"16f872f6-c454-452f-adf9-bee0a76ebe2b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g9xw9" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114860 4985 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/5dd173ed-ab63-4006-9e39-2f4abf301a8e-console-config\") pod \"console-f9d7485db-54vvw\" (UID: \"5dd173ed-ab63-4006-9e39-2f4abf301a8e\") " pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114890 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114921 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jl5zg\" (UniqueName: \"kubernetes.io/projected/16f872f6-c454-452f-adf9-bee0a76ebe2b-kube-api-access-jl5zg\") pod \"olm-operator-6b444d44fb-g9xw9\" (UID: \"16f872f6-c454-452f-adf9-bee0a76ebe2b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g9xw9" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114953 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/beb34140-c131-478f-94d6-c4b5433b58e9-config\") pod \"machine-approver-56656f9798-znxjv\" (UID: \"beb34140-c131-478f-94d6-c4b5433b58e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-znxjv" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.114987 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhtl5\" (UniqueName: \"kubernetes.io/projected/25a97396-e69d-40d6-8734-95b5aaec338f-kube-api-access-lhtl5\") pod \"machine-config-operator-74547568cd-8ph92\" (UID: \"25a97396-e69d-40d6-8734-95b5aaec338f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-8ph92" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115018 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/71626fcf-108c-42c4-95da-d634b73f587f-serving-cert\") pod \"service-ca-operator-777779d784-7gxv5\" (UID: \"71626fcf-108c-42c4-95da-d634b73f587f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7gxv5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115050 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-audit-dir\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115080 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/677a7eeb-960f-4771-bd2f-9fedef723ffd-config-volume\") pod \"collect-profiles-29488320-8l78v\" (UID: \"677a7eeb-960f-4771-bd2f-9fedef723ffd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488320-8l78v" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115139 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brg6q\" (UniqueName: \"kubernetes.io/projected/677a7eeb-960f-4771-bd2f-9fedef723ffd-kube-api-access-brg6q\") pod 
\"collect-profiles-29488320-8l78v\" (UID: \"677a7eeb-960f-4771-bd2f-9fedef723ffd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488320-8l78v" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115174 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nhr7l\" (UniqueName: \"kubernetes.io/projected/ae20ca57-847f-4344-9718-aa179543b4ae-kube-api-access-nhr7l\") pod \"downloads-7954f5f757-54sg5\" (UID: \"ae20ca57-847f-4344-9718-aa179543b4ae\") " pod="openshift-console/downloads-7954f5f757-54sg5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115205 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d46a176e-d2cd-41cc-8420-37762bc47cd3-trusted-ca\") pod \"ingress-operator-5b745b69d9-mp62x\" (UID: \"d46a176e-d2cd-41cc-8420-37762bc47cd3\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mp62x" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115238 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115268 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e0fc4ac9-4ec8-4651-bd15-c55bbf199299-serving-cert\") pod \"console-operator-58897d9998-sffms\" (UID: \"e0fc4ac9-4ec8-4651-bd15-c55bbf199299\") " pod="openshift-console-operator/console-operator-58897d9998-sffms" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115300 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22qr6\" (UniqueName: \"kubernetes.io/projected/5dd173ed-ab63-4006-9e39-2f4abf301a8e-kube-api-access-22qr6\") pod \"console-f9d7485db-54vvw\" (UID: \"5dd173ed-ab63-4006-9e39-2f4abf301a8e\") " pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115334 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/234cec4e-fc7e-4a34-b638-f1cc49fb2299-serving-cert\") pod \"controller-manager-879f6c89f-mntqm\" (UID: \"234cec4e-fc7e-4a34-b638-f1cc49fb2299\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115365 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/585479e7-a937-42f2-9802-2117e25c68c1-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-5j4gc\" (UID: \"585479e7-a937-42f2-9802-2117e25c68c1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5j4gc" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115396 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-audit-policies\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115405 
4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/5dd173ed-ab63-4006-9e39-2f4abf301a8e-oauth-serving-cert\") pod \"console-f9d7485db-54vvw\" (UID: \"5dd173ed-ab63-4006-9e39-2f4abf301a8e\") " pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115444 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/677a7eeb-960f-4771-bd2f-9fedef723ffd-secret-volume\") pod \"collect-profiles-29488320-8l78v\" (UID: \"677a7eeb-960f-4771-bd2f-9fedef723ffd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488320-8l78v" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115473 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d46a176e-d2cd-41cc-8420-37762bc47cd3-bound-sa-token\") pod \"ingress-operator-5b745b69d9-mp62x\" (UID: \"d46a176e-d2cd-41cc-8420-37762bc47cd3\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mp62x" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115499 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115506 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwf89\" (UniqueName: \"kubernetes.io/projected/3fa96974-2f91-4b24-b80d-4b221107adbe-kube-api-access-xwf89\") pod \"openshift-config-operator-7777fb866f-dccnf\" (UID: \"3fa96974-2f91-4b24-b80d-4b221107adbe\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dccnf" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115581 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29hwm\" (UniqueName: \"kubernetes.io/projected/71626fcf-108c-42c4-95da-d634b73f587f-kube-api-access-29hwm\") pod \"service-ca-operator-777779d784-7gxv5\" (UID: \"71626fcf-108c-42c4-95da-d634b73f587f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7gxv5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115624 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6tjcw\" (UniqueName: \"kubernetes.io/projected/e74fd6cc-f34d-41c4-8d01-0f556277340d-kube-api-access-6tjcw\") pod \"machine-api-operator-5694c8668f-bnmp5\" (UID: \"e74fd6cc-f34d-41c4-8d01-0f556277340d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bnmp5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115652 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/234cec4e-fc7e-4a34-b638-f1cc49fb2299-client-ca\") pod \"controller-manager-879f6c89f-mntqm\" (UID: \"234cec4e-fc7e-4a34-b638-f1cc49fb2299\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115662 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115734 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97676058-3567-4d0a-b8da-ad5890e39080-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-gp85q\" (UID: \"97676058-3567-4d0a-b8da-ad5890e39080\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gp85q" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115774 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9466dc5f-afcc-4586-bb92-cc23f5e64e77-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-n9j92\" (UID: \"9466dc5f-afcc-4586-bb92-cc23f5e64e77\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9j92" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115810 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-config\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115849 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/47fe0371-d4d4-40e2-905f-5e26db186cbe-profile-collector-cert\") pod \"catalog-operator-68c6474976-2dflz\" (UID: \"47fe0371-d4d4-40e2-905f-5e26db186cbe\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2dflz" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115880 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e1ea9185-aa51-4b82-98ed-b2f028d291b2-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-fwpcj\" (UID: \"e1ea9185-aa51-4b82-98ed-b2f028d291b2\") " pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115918 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-audit-dir\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115951 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e74fd6cc-f34d-41c4-8d01-0f556277340d-images\") pod \"machine-api-operator-5694c8668f-bnmp5\" (UID: \"e74fd6cc-f34d-41c4-8d01-0f556277340d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bnmp5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.115981 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5dd173ed-ab63-4006-9e39-2f4abf301a8e-trusted-ca-bundle\") pod \"console-f9d7485db-54vvw\" (UID: 
\"5dd173ed-ab63-4006-9e39-2f4abf301a8e\") " pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116031 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/585479e7-a937-42f2-9802-2117e25c68c1-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-5j4gc\" (UID: \"585479e7-a937-42f2-9802-2117e25c68c1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5j4gc" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116062 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4gjfm\" (UniqueName: \"kubernetes.io/projected/cbf25816-bff6-42fc-8e43-513b490e830b-kube-api-access-4gjfm\") pod \"cluster-samples-operator-665b6dd947-w7k95\" (UID: \"cbf25816-bff6-42fc-8e43-513b490e830b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w7k95" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116092 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b1f335db-7e31-44a9-b113-bb546349caa7-etcd-client\") pod \"etcd-operator-b45778765-g229p\" (UID: \"b1f335db-7e31-44a9-b113-bb546349caa7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g229p" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116156 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9466dc5f-afcc-4586-bb92-cc23f5e64e77-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-n9j92\" (UID: \"9466dc5f-afcc-4586-bb92-cc23f5e64e77\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9j92" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116191 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/067ed730-bfcc-4d6e-84d4-28c57fa90343-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-9k44n\" (UID: \"067ed730-bfcc-4d6e-84d4-28c57fa90343\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9k44n" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116226 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/e74fd6cc-f34d-41c4-8d01-0f556277340d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-bnmp5\" (UID: \"e74fd6cc-f34d-41c4-8d01-0f556277340d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bnmp5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116258 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1f335db-7e31-44a9-b113-bb546349caa7-config\") pod \"etcd-operator-b45778765-g229p\" (UID: \"b1f335db-7e31-44a9-b113-bb546349caa7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g229p" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116292 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/b56b49d8-11f3-49bc-bad7-d24bd00f0589-mountpoint-dir\") pod \"csi-hostpathplugin-4vgrr\" (UID: \"b56b49d8-11f3-49bc-bad7-d24bd00f0589\") " pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" Jan 25 00:08:46 
crc kubenswrapper[4985]: I0125 00:08:46.116324 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5dd173ed-ab63-4006-9e39-2f4abf301a8e-service-ca\") pod \"console-f9d7485db-54vvw\" (UID: \"5dd173ed-ab63-4006-9e39-2f4abf301a8e\") " pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116355 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e74fd6cc-f34d-41c4-8d01-0f556277340d-config\") pod \"machine-api-operator-5694c8668f-bnmp5\" (UID: \"e74fd6cc-f34d-41c4-8d01-0f556277340d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bnmp5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116406 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b1f335db-7e31-44a9-b113-bb546349caa7-serving-cert\") pod \"etcd-operator-b45778765-g229p\" (UID: \"b1f335db-7e31-44a9-b113-bb546349caa7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g229p" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116439 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4ee6bec9-a188-48bb-b49b-eeae08e55158-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-jql78\" (UID: \"4ee6bec9-a188-48bb-b49b-eeae08e55158\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116469 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4ee6bec9-a188-48bb-b49b-eeae08e55158-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-jql78\" (UID: \"4ee6bec9-a188-48bb-b49b-eeae08e55158\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116467 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116502 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4ee6bec9-a188-48bb-b49b-eeae08e55158-encryption-config\") pod \"apiserver-7bbb656c7d-jql78\" (UID: \"4ee6bec9-a188-48bb-b49b-eeae08e55158\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116533 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/b1f335db-7e31-44a9-b113-bb546349caa7-etcd-service-ca\") pod \"etcd-operator-b45778765-g229p\" (UID: \"b1f335db-7e31-44a9-b113-bb546349caa7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g229p" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116567 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/25a97396-e69d-40d6-8734-95b5aaec338f-images\") pod \"machine-config-operator-74547568cd-8ph92\" (UID: \"25a97396-e69d-40d6-8734-95b5aaec338f\") 
" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-8ph92" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116600 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/067ed730-bfcc-4d6e-84d4-28c57fa90343-config\") pod \"openshift-apiserver-operator-796bbdcf4f-9k44n\" (UID: \"067ed730-bfcc-4d6e-84d4-28c57fa90343\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9k44n" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116630 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/25a97396-e69d-40d6-8734-95b5aaec338f-proxy-tls\") pod \"machine-config-operator-74547568cd-8ph92\" (UID: \"25a97396-e69d-40d6-8734-95b5aaec338f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-8ph92" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116664 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/beb34140-c131-478f-94d6-c4b5433b58e9-machine-approver-tls\") pod \"machine-approver-56656f9798-znxjv\" (UID: \"beb34140-c131-478f-94d6-c4b5433b58e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-znxjv" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116698 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87lcv\" (UniqueName: \"kubernetes.io/projected/067ed730-bfcc-4d6e-84d4-28c57fa90343-kube-api-access-87lcv\") pod \"openshift-apiserver-operator-796bbdcf4f-9k44n\" (UID: \"067ed730-bfcc-4d6e-84d4-28c57fa90343\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9k44n" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116730 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-encryption-config\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116762 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116810 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/beb34140-c131-478f-94d6-c4b5433b58e9-auth-proxy-config\") pod \"machine-approver-56656f9798-znxjv\" (UID: \"beb34140-c131-478f-94d6-c4b5433b58e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-znxjv" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116850 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-etcd-serving-ca\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116857 4985 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a8c49802-43de-4e97-8067-4824c3312194-config\") pod \"kube-apiserver-operator-766d6c64bb-jdgg6\" (UID: \"a8c49802-43de-4e97-8067-4824c3312194\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jdgg6" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116883 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e0fc4ac9-4ec8-4651-bd15-c55bbf199299-trusted-ca\") pod \"console-operator-58897d9998-sffms\" (UID: \"e0fc4ac9-4ec8-4651-bd15-c55bbf199299\") " pod="openshift-console-operator/console-operator-58897d9998-sffms" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116914 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/16f872f6-c454-452f-adf9-bee0a76ebe2b-srv-cert\") pod \"olm-operator-6b444d44fb-g9xw9\" (UID: \"16f872f6-c454-452f-adf9-bee0a76ebe2b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g9xw9" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116945 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sqjg9\" (UniqueName: \"kubernetes.io/projected/b1f335db-7e31-44a9-b113-bb546349caa7-kube-api-access-sqjg9\") pod \"etcd-operator-b45778765-g229p\" (UID: \"b1f335db-7e31-44a9-b113-bb546349caa7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g229p" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.116976 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/b56b49d8-11f3-49bc-bad7-d24bd00f0589-plugins-dir\") pod \"csi-hostpathplugin-4vgrr\" (UID: \"b56b49d8-11f3-49bc-bad7-d24bd00f0589\") " pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.117008 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e0fc4ac9-4ec8-4651-bd15-c55bbf199299-config\") pod \"console-operator-58897d9998-sffms\" (UID: \"e0fc4ac9-4ec8-4651-bd15-c55bbf199299\") " pod="openshift-console-operator/console-operator-58897d9998-sffms" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.117042 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vhfc\" (UniqueName: \"kubernetes.io/projected/72c63de9-5d4f-4037-b70e-11ddf9a4904c-kube-api-access-4vhfc\") pod \"control-plane-machine-set-operator-78cbb6b69f-7tgz4\" (UID: \"72c63de9-5d4f-4037-b70e-11ddf9a4904c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7tgz4" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.117076 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7x89\" (UniqueName: \"kubernetes.io/projected/d46a176e-d2cd-41cc-8420-37762bc47cd3-kube-api-access-z7x89\") pod \"ingress-operator-5b745b69d9-mp62x\" (UID: \"d46a176e-d2cd-41cc-8420-37762bc47cd3\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mp62x" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.117134 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fchn\" (UniqueName: \"kubernetes.io/projected/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-kube-api-access-5fchn\") pod 
\"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.117166 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/cdc7acca-bf54-44d2-986b-10ecfb1a0abd-serviceca\") pod \"image-pruner-29488320-65m92\" (UID: \"cdc7acca-bf54-44d2-986b-10ecfb1a0abd\") " pod="openshift-image-registry/image-pruner-29488320-65m92" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.117198 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c5zwc\" (UniqueName: \"kubernetes.io/projected/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-kube-api-access-c5zwc\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.117230 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnhmp\" (UniqueName: \"kubernetes.io/projected/b82d04cc-00d3-43dc-8317-dacb594c8b61-kube-api-access-pnhmp\") pod \"migrator-59844c95c7-bfzz6\" (UID: \"b82d04cc-00d3-43dc-8317-dacb594c8b61\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bfzz6" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.117266 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nw6k9\" (UniqueName: \"kubernetes.io/projected/beb34140-c131-478f-94d6-c4b5433b58e9-kube-api-access-nw6k9\") pod \"machine-approver-56656f9798-znxjv\" (UID: \"beb34140-c131-478f-94d6-c4b5433b58e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-znxjv" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.117299 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/5dd173ed-ab63-4006-9e39-2f4abf301a8e-console-serving-cert\") pod \"console-f9d7485db-54vvw\" (UID: \"5dd173ed-ab63-4006-9e39-2f4abf301a8e\") " pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.117333 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.117367 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/234cec4e-fc7e-4a34-b638-f1cc49fb2299-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-mntqm\" (UID: \"234cec4e-fc7e-4a34-b638-f1cc49fb2299\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.117442 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc 
kubenswrapper[4985]: I0125 00:08:46.117474 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.117509 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71626fcf-108c-42c4-95da-d634b73f587f-config\") pod \"service-ca-operator-777779d784-7gxv5\" (UID: \"71626fcf-108c-42c4-95da-d634b73f587f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7gxv5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.117522 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/cbf25816-bff6-42fc-8e43-513b490e830b-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-w7k95\" (UID: \"cbf25816-bff6-42fc-8e43-513b490e830b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w7k95" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.117540 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4ee6bec9-a188-48bb-b49b-eeae08e55158-serving-cert\") pod \"apiserver-7bbb656c7d-jql78\" (UID: \"4ee6bec9-a188-48bb-b49b-eeae08e55158\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.117557 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.117571 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4ee6bec9-a188-48bb-b49b-eeae08e55158-audit-policies\") pod \"apiserver-7bbb656c7d-jql78\" (UID: \"4ee6bec9-a188-48bb-b49b-eeae08e55158\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.117608 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zdqwt\" (UniqueName: \"kubernetes.io/projected/9466dc5f-afcc-4586-bb92-cc23f5e64e77-kube-api-access-zdqwt\") pod \"kube-storage-version-migrator-operator-b67b599dd-n9j92\" (UID: \"9466dc5f-afcc-4586-bb92-cc23f5e64e77\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9j92" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.117642 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/b1f335db-7e31-44a9-b113-bb546349caa7-etcd-ca\") pod \"etcd-operator-b45778765-g229p\" (UID: \"b1f335db-7e31-44a9-b113-bb546349caa7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g229p" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.118037 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-trusted-ca-bundle\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.118653 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/234cec4e-fc7e-4a34-b638-f1cc49fb2299-config\") pod \"controller-manager-879f6c89f-mntqm\" (UID: \"234cec4e-fc7e-4a34-b638-f1cc49fb2299\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.118857 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/b56b49d8-11f3-49bc-bad7-d24bd00f0589-mountpoint-dir\") pod \"csi-hostpathplugin-4vgrr\" (UID: \"b56b49d8-11f3-49bc-bad7-d24bd00f0589\") " pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.118958 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-audit\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.119286 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-audit-dir\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.119533 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-config\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.120233 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/beb34140-c131-478f-94d6-c4b5433b58e9-config\") pod \"machine-approver-56656f9798-znxjv\" (UID: \"beb34140-c131-478f-94d6-c4b5433b58e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-znxjv" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.120582 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/cdc7acca-bf54-44d2-986b-10ecfb1a0abd-serviceca\") pod \"image-pruner-29488320-65m92\" (UID: \"cdc7acca-bf54-44d2-986b-10ecfb1a0abd\") " pod="openshift-image-registry/image-pruner-29488320-65m92" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.120715 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/25a97396-e69d-40d6-8734-95b5aaec338f-auth-proxy-config\") pod \"machine-config-operator-74547568cd-8ph92\" (UID: \"25a97396-e69d-40d6-8734-95b5aaec338f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-8ph92" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.120754 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: 
\"kubernetes.io/host-path/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-audit-dir\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.120777 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/b56b49d8-11f3-49bc-bad7-d24bd00f0589-plugins-dir\") pod \"csi-hostpathplugin-4vgrr\" (UID: \"b56b49d8-11f3-49bc-bad7-d24bd00f0589\") " pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.121085 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3fa96974-2f91-4b24-b80d-4b221107adbe-serving-cert\") pod \"openshift-config-operator-7777fb866f-dccnf\" (UID: \"3fa96974-2f91-4b24-b80d-4b221107adbe\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dccnf" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.121252 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-etcd-client\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.121531 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d46a176e-d2cd-41cc-8420-37762bc47cd3-trusted-ca\") pod \"ingress-operator-5b745b69d9-mp62x\" (UID: \"d46a176e-d2cd-41cc-8420-37762bc47cd3\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mp62x" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.121784 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.121908 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e0fc4ac9-4ec8-4651-bd15-c55bbf199299-config\") pod \"console-operator-58897d9998-sffms\" (UID: \"e0fc4ac9-4ec8-4651-bd15-c55bbf199299\") " pod="openshift-console-operator/console-operator-58897d9998-sffms" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.122137 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/e74fd6cc-f34d-41c4-8d01-0f556277340d-images\") pod \"machine-api-operator-5694c8668f-bnmp5\" (UID: \"e74fd6cc-f34d-41c4-8d01-0f556277340d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bnmp5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.122343 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-image-import-ca\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.122461 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a8c49802-43de-4e97-8067-4824c3312194-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-jdgg6\" (UID: \"a8c49802-43de-4e97-8067-4824c3312194\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jdgg6" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.122795 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-audit-policies\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.124597 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d46a176e-d2cd-41cc-8420-37762bc47cd3-metrics-tls\") pod \"ingress-operator-5b745b69d9-mp62x\" (UID: \"d46a176e-d2cd-41cc-8420-37762bc47cd3\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mp62x" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.124890 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-etcd-serving-ca\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.125005 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/234cec4e-fc7e-4a34-b638-f1cc49fb2299-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-mntqm\" (UID: \"234cec4e-fc7e-4a34-b638-f1cc49fb2299\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.125483 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e0fc4ac9-4ec8-4651-bd15-c55bbf199299-trusted-ca\") pod \"console-operator-58897d9998-sffms\" (UID: \"e0fc4ac9-4ec8-4651-bd15-c55bbf199299\") " pod="openshift-console-operator/console-operator-58897d9998-sffms" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.125514 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/585479e7-a937-42f2-9802-2117e25c68c1-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-5j4gc\" (UID: \"585479e7-a937-42f2-9802-2117e25c68c1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5j4gc" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.125609 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-serving-cert\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.125667 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/585479e7-a937-42f2-9802-2117e25c68c1-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-5j4gc\" (UID: \"585479e7-a937-42f2-9802-2117e25c68c1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5j4gc" 
Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.125711 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/beb34140-c131-478f-94d6-c4b5433b58e9-auth-proxy-config\") pod \"machine-approver-56656f9798-znxjv\" (UID: \"beb34140-c131-478f-94d6-c4b5433b58e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-znxjv" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.125750 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/16f872f6-c454-452f-adf9-bee0a76ebe2b-profile-collector-cert\") pod \"olm-operator-6b444d44fb-g9xw9\" (UID: \"16f872f6-c454-452f-adf9-bee0a76ebe2b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g9xw9" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.125850 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.125981 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/47fe0371-d4d4-40e2-905f-5e26db186cbe-srv-cert\") pod \"catalog-operator-68c6474976-2dflz\" (UID: \"47fe0371-d4d4-40e2-905f-5e26db186cbe\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2dflz" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.126410 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e74fd6cc-f34d-41c4-8d01-0f556277340d-config\") pod \"machine-api-operator-5694c8668f-bnmp5\" (UID: \"e74fd6cc-f34d-41c4-8d01-0f556277340d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bnmp5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.126687 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/067ed730-bfcc-4d6e-84d4-28c57fa90343-config\") pod \"openshift-apiserver-operator-796bbdcf4f-9k44n\" (UID: \"067ed730-bfcc-4d6e-84d4-28c57fa90343\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9k44n" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.126840 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.127571 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.127855 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/e0fc4ac9-4ec8-4651-bd15-c55bbf199299-serving-cert\") pod \"console-operator-58897d9998-sffms\" (UID: \"e0fc4ac9-4ec8-4651-bd15-c55bbf199299\") " pod="openshift-console-operator/console-operator-58897d9998-sffms" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.128019 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/5dd173ed-ab63-4006-9e39-2f4abf301a8e-console-oauth-config\") pod \"console-f9d7485db-54vvw\" (UID: \"5dd173ed-ab63-4006-9e39-2f4abf301a8e\") " pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.128023 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/067ed730-bfcc-4d6e-84d4-28c57fa90343-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-9k44n\" (UID: \"067ed730-bfcc-4d6e-84d4-28c57fa90343\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9k44n" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.128237 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-encryption-config\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.128511 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/16f872f6-c454-452f-adf9-bee0a76ebe2b-srv-cert\") pod \"olm-operator-6b444d44fb-g9xw9\" (UID: \"16f872f6-c454-452f-adf9-bee0a76ebe2b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g9xw9" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.128629 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/e74fd6cc-f34d-41c4-8d01-0f556277340d-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-bnmp5\" (UID: \"e74fd6cc-f34d-41c4-8d01-0f556277340d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bnmp5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.128721 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.128926 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/47fe0371-d4d4-40e2-905f-5e26db186cbe-profile-collector-cert\") pod \"catalog-operator-68c6474976-2dflz\" (UID: \"47fe0371-d4d4-40e2-905f-5e26db186cbe\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2dflz" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.128939 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.129520 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.129983 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/beb34140-c131-478f-94d6-c4b5433b58e9-machine-approver-tls\") pod \"machine-approver-56656f9798-znxjv\" (UID: \"beb34140-c131-478f-94d6-c4b5433b58e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-znxjv" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.132904 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/234cec4e-fc7e-4a34-b638-f1cc49fb2299-serving-cert\") pod \"controller-manager-879f6c89f-mntqm\" (UID: \"234cec4e-fc7e-4a34-b638-f1cc49fb2299\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.133183 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.135002 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.135129 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/677a7eeb-960f-4771-bd2f-9fedef723ffd-secret-volume\") pod \"collect-profiles-29488320-8l78v\" (UID: \"677a7eeb-960f-4771-bd2f-9fedef723ffd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488320-8l78v" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.145776 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/5dd173ed-ab63-4006-9e39-2f4abf301a8e-console-serving-cert\") pod \"console-f9d7485db-54vvw\" (UID: \"5dd173ed-ab63-4006-9e39-2f4abf301a8e\") " pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.153827 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.170315 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.176956 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5dd173ed-ab63-4006-9e39-2f4abf301a8e-service-ca\") pod \"console-f9d7485db-54vvw\" (UID: \"5dd173ed-ab63-4006-9e39-2f4abf301a8e\") " pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.190240 4985 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-console"/"console-config" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.203359 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/5dd173ed-ab63-4006-9e39-2f4abf301a8e-console-config\") pod \"console-f9d7485db-54vvw\" (UID: \"5dd173ed-ab63-4006-9e39-2f4abf301a8e\") " pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.219948 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9466dc5f-afcc-4586-bb92-cc23f5e64e77-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-n9j92\" (UID: \"9466dc5f-afcc-4586-bb92-cc23f5e64e77\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9j92" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.220070 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9466dc5f-afcc-4586-bb92-cc23f5e64e77-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-n9j92\" (UID: \"9466dc5f-afcc-4586-bb92-cc23f5e64e77\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9j92" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.220426 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zdqwt\" (UniqueName: \"kubernetes.io/projected/9466dc5f-afcc-4586-bb92-cc23f5e64e77-kube-api-access-zdqwt\") pod \"kube-storage-version-migrator-operator-b67b599dd-n9j92\" (UID: \"9466dc5f-afcc-4586-bb92-cc23f5e64e77\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9j92" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.222056 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.225489 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5dd173ed-ab63-4006-9e39-2f4abf301a8e-trusted-ca-bundle\") pod \"console-f9d7485db-54vvw\" (UID: \"5dd173ed-ab63-4006-9e39-2f4abf301a8e\") " pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.230815 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.250830 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.271201 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.281187 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b1f335db-7e31-44a9-b113-bb546349caa7-serving-cert\") pod \"etcd-operator-b45778765-g229p\" (UID: \"b1f335db-7e31-44a9-b113-bb546349caa7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g229p" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.291902 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 
25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.303314 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b1f335db-7e31-44a9-b113-bb546349caa7-etcd-client\") pod \"etcd-operator-b45778765-g229p\" (UID: \"b1f335db-7e31-44a9-b113-bb546349caa7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g229p" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.310189 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.311281 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b1f335db-7e31-44a9-b113-bb546349caa7-config\") pod \"etcd-operator-b45778765-g229p\" (UID: \"b1f335db-7e31-44a9-b113-bb546349caa7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g229p" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.331217 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.339373 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/b1f335db-7e31-44a9-b113-bb546349caa7-etcd-ca\") pod \"etcd-operator-b45778765-g229p\" (UID: \"b1f335db-7e31-44a9-b113-bb546349caa7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g229p" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.350419 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.353496 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/b1f335db-7e31-44a9-b113-bb546349caa7-etcd-service-ca\") pod \"etcd-operator-b45778765-g229p\" (UID: \"b1f335db-7e31-44a9-b113-bb546349caa7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g229p" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.371301 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.410345 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.416973 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4ee6bec9-a188-48bb-b49b-eeae08e55158-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-jql78\" (UID: \"4ee6bec9-a188-48bb-b49b-eeae08e55158\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.431639 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.451375 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.459851 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4ee6bec9-a188-48bb-b49b-eeae08e55158-etcd-client\") pod \"apiserver-7bbb656c7d-jql78\" (UID: \"4ee6bec9-a188-48bb-b49b-eeae08e55158\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.471418 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.478544 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4ee6bec9-a188-48bb-b49b-eeae08e55158-serving-cert\") pod \"apiserver-7bbb656c7d-jql78\" (UID: \"4ee6bec9-a188-48bb-b49b-eeae08e55158\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.490845 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.500301 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4ee6bec9-a188-48bb-b49b-eeae08e55158-encryption-config\") pod \"apiserver-7bbb656c7d-jql78\" (UID: \"4ee6bec9-a188-48bb-b49b-eeae08e55158\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.510916 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.531079 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.537461 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4ee6bec9-a188-48bb-b49b-eeae08e55158-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-jql78\" (UID: \"4ee6bec9-a188-48bb-b49b-eeae08e55158\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.550848 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.571436 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.590893 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.610973 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.631564 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.640791 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/4ee6bec9-a188-48bb-b49b-eeae08e55158-audit-policies\") pod \"apiserver-7bbb656c7d-jql78\" (UID: \"4ee6bec9-a188-48bb-b49b-eeae08e55158\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.651421 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.671467 4985 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.690798 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.710815 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.730712 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.750972 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.765493 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9466dc5f-afcc-4586-bb92-cc23f5e64e77-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-n9j92\" (UID: \"9466dc5f-afcc-4586-bb92-cc23f5e64e77\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9j92" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.771876 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.781383 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9466dc5f-afcc-4586-bb92-cc23f5e64e77-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-n9j92\" (UID: \"9466dc5f-afcc-4586-bb92-cc23f5e64e77\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9j92" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.790943 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.810990 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.830922 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.851433 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.857483 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/97676058-3567-4d0a-b8da-ad5890e39080-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-gp85q\" (UID: \"97676058-3567-4d0a-b8da-ad5890e39080\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gp85q" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.870944 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.891545 4985 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.910869 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.931415 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.937280 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/97676058-3567-4d0a-b8da-ad5890e39080-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-gp85q\" (UID: \"97676058-3567-4d0a-b8da-ad5890e39080\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gp85q" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.949063 4985 request.go:700] Waited for 1.009986301s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/configmaps?fieldSelector=metadata.name%3Dclient-ca&limit=500&resourceVersion=0 Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.951063 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.970903 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 25 00:08:46 crc kubenswrapper[4985]: I0125 00:08:46.991767 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.011508 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.031668 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.050849 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.070896 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.091533 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.111576 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 25 00:08:47 crc kubenswrapper[4985]: E0125 00:08:47.114480 4985 configmap.go:193] Couldn't get configMap openshift-marketplace/marketplace-trusted-ca: failed to sync configmap cache: timed out waiting for the condition Jan 25 00:08:47 crc kubenswrapper[4985]: E0125 00:08:47.114556 4985 secret.go:188] Couldn't get secret openshift-machine-api/control-plane-machine-set-operator-tls: failed to sync secret 
cache: timed out waiting for the condition Jan 25 00:08:47 crc kubenswrapper[4985]: E0125 00:08:47.114621 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e1ea9185-aa51-4b82-98ed-b2f028d291b2-marketplace-trusted-ca podName:e1ea9185-aa51-4b82-98ed-b2f028d291b2 nodeName:}" failed. No retries permitted until 2026-01-25 00:08:47.6145789 +0000 UTC m=+137.646515213 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "marketplace-trusted-ca" (UniqueName: "kubernetes.io/configmap/e1ea9185-aa51-4b82-98ed-b2f028d291b2-marketplace-trusted-ca") pod "marketplace-operator-79b997595-fwpcj" (UID: "e1ea9185-aa51-4b82-98ed-b2f028d291b2") : failed to sync configmap cache: timed out waiting for the condition Jan 25 00:08:47 crc kubenswrapper[4985]: E0125 00:08:47.114661 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/72c63de9-5d4f-4037-b70e-11ddf9a4904c-control-plane-machine-set-operator-tls podName:72c63de9-5d4f-4037-b70e-11ddf9a4904c nodeName:}" failed. No retries permitted until 2026-01-25 00:08:47.614642212 +0000 UTC m=+137.646578515 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "control-plane-machine-set-operator-tls" (UniqueName: "kubernetes.io/secret/72c63de9-5d4f-4037-b70e-11ddf9a4904c-control-plane-machine-set-operator-tls") pod "control-plane-machine-set-operator-78cbb6b69f-7tgz4" (UID: "72c63de9-5d4f-4037-b70e-11ddf9a4904c") : failed to sync secret cache: timed out waiting for the condition Jan 25 00:08:47 crc kubenswrapper[4985]: E0125 00:08:47.118956 4985 secret.go:188] Couldn't get secret openshift-service-ca-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition Jan 25 00:08:47 crc kubenswrapper[4985]: E0125 00:08:47.119082 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/71626fcf-108c-42c4-95da-d634b73f587f-serving-cert podName:71626fcf-108c-42c4-95da-d634b73f587f nodeName:}" failed. No retries permitted until 2026-01-25 00:08:47.619048108 +0000 UTC m=+137.650984411 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/71626fcf-108c-42c4-95da-d634b73f587f-serving-cert") pod "service-ca-operator-777779d784-7gxv5" (UID: "71626fcf-108c-42c4-95da-d634b73f587f") : failed to sync secret cache: timed out waiting for the condition Jan 25 00:08:47 crc kubenswrapper[4985]: E0125 00:08:47.119277 4985 secret.go:188] Couldn't get secret openshift-marketplace/marketplace-operator-metrics: failed to sync secret cache: timed out waiting for the condition Jan 25 00:08:47 crc kubenswrapper[4985]: E0125 00:08:47.119345 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e1ea9185-aa51-4b82-98ed-b2f028d291b2-marketplace-operator-metrics podName:e1ea9185-aa51-4b82-98ed-b2f028d291b2 nodeName:}" failed. No retries permitted until 2026-01-25 00:08:47.619326755 +0000 UTC m=+137.651263068 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "marketplace-operator-metrics" (UniqueName: "kubernetes.io/secret/e1ea9185-aa51-4b82-98ed-b2f028d291b2-marketplace-operator-metrics") pod "marketplace-operator-79b997595-fwpcj" (UID: "e1ea9185-aa51-4b82-98ed-b2f028d291b2") : failed to sync secret cache: timed out waiting for the condition Jan 25 00:08:47 crc kubenswrapper[4985]: E0125 00:08:47.120210 4985 configmap.go:193] Couldn't get configMap openshift-operator-lifecycle-manager/collect-profiles-config: failed to sync configmap cache: timed out waiting for the condition Jan 25 00:08:47 crc kubenswrapper[4985]: E0125 00:08:47.120295 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/677a7eeb-960f-4771-bd2f-9fedef723ffd-config-volume podName:677a7eeb-960f-4771-bd2f-9fedef723ffd nodeName:}" failed. No retries permitted until 2026-01-25 00:08:47.6202772 +0000 UTC m=+137.652213513 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-volume" (UniqueName: "kubernetes.io/configmap/677a7eeb-960f-4771-bd2f-9fedef723ffd-config-volume") pod "collect-profiles-29488320-8l78v" (UID: "677a7eeb-960f-4771-bd2f-9fedef723ffd") : failed to sync configmap cache: timed out waiting for the condition Jan 25 00:08:47 crc kubenswrapper[4985]: E0125 00:08:47.122541 4985 secret.go:188] Couldn't get secret openshift-machine-config-operator/mco-proxy-tls: failed to sync secret cache: timed out waiting for the condition Jan 25 00:08:47 crc kubenswrapper[4985]: E0125 00:08:47.122546 4985 configmap.go:193] Couldn't get configMap openshift-machine-config-operator/machine-config-operator-images: failed to sync configmap cache: timed out waiting for the condition Jan 25 00:08:47 crc kubenswrapper[4985]: E0125 00:08:47.122593 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/25a97396-e69d-40d6-8734-95b5aaec338f-proxy-tls podName:25a97396-e69d-40d6-8734-95b5aaec338f nodeName:}" failed. No retries permitted until 2026-01-25 00:08:47.622580011 +0000 UTC m=+137.654516314 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "proxy-tls" (UniqueName: "kubernetes.io/secret/25a97396-e69d-40d6-8734-95b5aaec338f-proxy-tls") pod "machine-config-operator-74547568cd-8ph92" (UID: "25a97396-e69d-40d6-8734-95b5aaec338f") : failed to sync secret cache: timed out waiting for the condition Jan 25 00:08:47 crc kubenswrapper[4985]: E0125 00:08:47.122636 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/25a97396-e69d-40d6-8734-95b5aaec338f-images podName:25a97396-e69d-40d6-8734-95b5aaec338f nodeName:}" failed. No retries permitted until 2026-01-25 00:08:47.622612262 +0000 UTC m=+137.654548615 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "images" (UniqueName: "kubernetes.io/configmap/25a97396-e69d-40d6-8734-95b5aaec338f-images") pod "machine-config-operator-74547568cd-8ph92" (UID: "25a97396-e69d-40d6-8734-95b5aaec338f") : failed to sync configmap cache: timed out waiting for the condition Jan 25 00:08:47 crc kubenswrapper[4985]: E0125 00:08:47.122645 4985 configmap.go:193] Couldn't get configMap openshift-service-ca-operator/service-ca-operator-config: failed to sync configmap cache: timed out waiting for the condition Jan 25 00:08:47 crc kubenswrapper[4985]: E0125 00:08:47.122687 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/71626fcf-108c-42c4-95da-d634b73f587f-config podName:71626fcf-108c-42c4-95da-d634b73f587f nodeName:}" failed. 
No retries permitted until 2026-01-25 00:08:47.622675943 +0000 UTC m=+137.654612246 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/71626fcf-108c-42c4-95da-d634b73f587f-config") pod "service-ca-operator-777779d784-7gxv5" (UID: "71626fcf-108c-42c4-95da-d634b73f587f") : failed to sync configmap cache: timed out waiting for the condition Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.130378 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.151087 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.171095 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.191403 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.210847 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.231338 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.251704 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.270837 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.291886 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.310500 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.331540 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.351169 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.370514 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.403364 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.411079 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.430273 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 
00:08:47.454621 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.470873 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.490947 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.511477 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.530772 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.551714 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.571311 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.590456 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.610796 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.630473 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.647632 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71626fcf-108c-42c4-95da-d634b73f587f-config\") pod \"service-ca-operator-777779d784-7gxv5\" (UID: \"71626fcf-108c-42c4-95da-d634b73f587f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7gxv5" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.647728 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/72c63de9-5d4f-4037-b70e-11ddf9a4904c-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-7tgz4\" (UID: \"72c63de9-5d4f-4037-b70e-11ddf9a4904c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7tgz4" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.647884 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e1ea9185-aa51-4b82-98ed-b2f028d291b2-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-fwpcj\" (UID: \"e1ea9185-aa51-4b82-98ed-b2f028d291b2\") " pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.648003 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/71626fcf-108c-42c4-95da-d634b73f587f-serving-cert\") pod \"service-ca-operator-777779d784-7gxv5\" (UID: \"71626fcf-108c-42c4-95da-d634b73f587f\") " 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-7gxv5" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.648045 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/677a7eeb-960f-4771-bd2f-9fedef723ffd-config-volume\") pod \"collect-profiles-29488320-8l78v\" (UID: \"677a7eeb-960f-4771-bd2f-9fedef723ffd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488320-8l78v" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.648214 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e1ea9185-aa51-4b82-98ed-b2f028d291b2-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-fwpcj\" (UID: \"e1ea9185-aa51-4b82-98ed-b2f028d291b2\") " pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.648287 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/25a97396-e69d-40d6-8734-95b5aaec338f-images\") pod \"machine-config-operator-74547568cd-8ph92\" (UID: \"25a97396-e69d-40d6-8734-95b5aaec338f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-8ph92" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.648320 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/25a97396-e69d-40d6-8734-95b5aaec338f-proxy-tls\") pod \"machine-config-operator-74547568cd-8ph92\" (UID: \"25a97396-e69d-40d6-8734-95b5aaec338f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-8ph92" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.650791 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71626fcf-108c-42c4-95da-d634b73f587f-config\") pod \"service-ca-operator-777779d784-7gxv5\" (UID: \"71626fcf-108c-42c4-95da-d634b73f587f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7gxv5" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.651331 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/677a7eeb-960f-4771-bd2f-9fedef723ffd-config-volume\") pod \"collect-profiles-29488320-8l78v\" (UID: \"677a7eeb-960f-4771-bd2f-9fedef723ffd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488320-8l78v" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.651817 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/25a97396-e69d-40d6-8734-95b5aaec338f-images\") pod \"machine-config-operator-74547568cd-8ph92\" (UID: \"25a97396-e69d-40d6-8734-95b5aaec338f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-8ph92" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.652685 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e1ea9185-aa51-4b82-98ed-b2f028d291b2-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-fwpcj\" (UID: \"e1ea9185-aa51-4b82-98ed-b2f028d291b2\") " pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.653503 4985 reflector.go:368] Caches populated for *v1.Secret from 
object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.655640 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/71626fcf-108c-42c4-95da-d634b73f587f-serving-cert\") pod \"service-ca-operator-777779d784-7gxv5\" (UID: \"71626fcf-108c-42c4-95da-d634b73f587f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7gxv5" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.656863 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/25a97396-e69d-40d6-8734-95b5aaec338f-proxy-tls\") pod \"machine-config-operator-74547568cd-8ph92\" (UID: \"25a97396-e69d-40d6-8734-95b5aaec338f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-8ph92" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.658062 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/72c63de9-5d4f-4037-b70e-11ddf9a4904c-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-7tgz4\" (UID: \"72c63de9-5d4f-4037-b70e-11ddf9a4904c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7tgz4" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.667032 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e1ea9185-aa51-4b82-98ed-b2f028d291b2-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-fwpcj\" (UID: \"e1ea9185-aa51-4b82-98ed-b2f028d291b2\") " pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.672296 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.737861 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/de88820c-7cff-4928-8f36-9ec785accadc-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-jzhrb\" (UID: \"de88820c-7cff-4928-8f36-9ec785accadc\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jzhrb" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.751749 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.758509 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9qchq\" (UniqueName: \"kubernetes.io/projected/de88820c-7cff-4928-8f36-9ec785accadc-kube-api-access-9qchq\") pod \"cluster-image-registry-operator-dc59b4c8b-jzhrb\" (UID: \"de88820c-7cff-4928-8f36-9ec785accadc\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jzhrb" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.761623 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jzhrb" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.771639 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.791056 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.810518 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.830744 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.851782 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.871151 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.891988 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.911723 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.931726 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.968906 4985 request.go:700] Waited for 1.85486155s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/hostpath-provisioner/serviceaccounts/csi-hostpath-provisioner-sa/token Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.975928 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-htvrt\" (UniqueName: \"kubernetes.io/projected/234cec4e-fc7e-4a34-b638-f1cc49fb2299-kube-api-access-htvrt\") pod \"controller-manager-879f6c89f-mntqm\" (UID: \"234cec4e-fc7e-4a34-b638-f1cc49fb2299\") " pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" Jan 25 00:08:47 crc kubenswrapper[4985]: I0125 00:08:47.979725 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.003913 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvrd6\" (UniqueName: \"kubernetes.io/projected/b56b49d8-11f3-49bc-bad7-d24bd00f0589-kube-api-access-nvrd6\") pod \"csi-hostpathplugin-4vgrr\" (UID: \"b56b49d8-11f3-49bc-bad7-d24bd00f0589\") " pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.019432 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mk4hg\" (UniqueName: \"kubernetes.io/projected/cdc7acca-bf54-44d2-986b-10ecfb1a0abd-kube-api-access-mk4hg\") pod \"image-pruner-29488320-65m92\" (UID: \"cdc7acca-bf54-44d2-986b-10ecfb1a0abd\") " pod="openshift-image-registry/image-pruner-29488320-65m92" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.035360 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwf89\" (UniqueName: \"kubernetes.io/projected/3fa96974-2f91-4b24-b80d-4b221107adbe-kube-api-access-xwf89\") pod \"openshift-config-operator-7777fb866f-dccnf\" (UID: \"3fa96974-2f91-4b24-b80d-4b221107adbe\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-dccnf" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.053043 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-pruner-29488320-65m92" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.059459 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9pqdh\" (UniqueName: \"kubernetes.io/projected/97676058-3567-4d0a-b8da-ad5890e39080-kube-api-access-9pqdh\") pod \"openshift-controller-manager-operator-756b6f6bc6-gp85q\" (UID: \"97676058-3567-4d0a-b8da-ad5890e39080\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gp85q" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.066959 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dccnf" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.076825 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a8c49802-43de-4e97-8067-4824c3312194-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-jdgg6\" (UID: \"a8c49802-43de-4e97-8067-4824c3312194\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jdgg6" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.087974 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.102030 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/585479e7-a937-42f2-9802-2117e25c68c1-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-5j4gc\" (UID: \"585479e7-a937-42f2-9802-2117e25c68c1\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5j4gc" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.108950 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jdgg6" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.118779 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtw5w\" (UniqueName: \"kubernetes.io/projected/4ee6bec9-a188-48bb-b49b-eeae08e55158-kube-api-access-rtw5w\") pod \"apiserver-7bbb656c7d-jql78\" (UID: \"4ee6bec9-a188-48bb-b49b-eeae08e55158\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.135523 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5j4gc" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.143368 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdqzs\" (UniqueName: \"kubernetes.io/projected/e0fc4ac9-4ec8-4651-bd15-c55bbf199299-kube-api-access-vdqzs\") pod \"console-operator-58897d9998-sffms\" (UID: \"e0fc4ac9-4ec8-4651-bd15-c55bbf199299\") " pod="openshift-console-operator/console-operator-58897d9998-sffms" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.155732 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87lcv\" (UniqueName: \"kubernetes.io/projected/067ed730-bfcc-4d6e-84d4-28c57fa90343-kube-api-access-87lcv\") pod \"openshift-apiserver-operator-796bbdcf4f-9k44n\" (UID: \"067ed730-bfcc-4d6e-84d4-28c57fa90343\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9k44n" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.175060 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sk66t\" (UniqueName: \"kubernetes.io/projected/e1ea9185-aa51-4b82-98ed-b2f028d291b2-kube-api-access-sk66t\") pod \"marketplace-operator-79b997595-fwpcj\" (UID: \"e1ea9185-aa51-4b82-98ed-b2f028d291b2\") " pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.200752 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.201315 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29hwm\" (UniqueName: \"kubernetes.io/projected/71626fcf-108c-42c4-95da-d634b73f587f-kube-api-access-29hwm\") pod \"service-ca-operator-777779d784-7gxv5\" (UID: \"71626fcf-108c-42c4-95da-d634b73f587f\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-7gxv5" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.210376 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6tjcw\" (UniqueName: \"kubernetes.io/projected/e74fd6cc-f34d-41c4-8d01-0f556277340d-kube-api-access-6tjcw\") pod \"machine-api-operator-5694c8668f-bnmp5\" (UID: \"e74fd6cc-f34d-41c4-8d01-0f556277340d\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bnmp5" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.234391 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jl5zg\" (UniqueName: \"kubernetes.io/projected/16f872f6-c454-452f-adf9-bee0a76ebe2b-kube-api-access-jl5zg\") pod \"olm-operator-6b444d44fb-g9xw9\" (UID: \"16f872f6-c454-452f-adf9-bee0a76ebe2b\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g9xw9" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.246839 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mntqm"] Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.248697 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4gjfm\" (UniqueName: \"kubernetes.io/projected/cbf25816-bff6-42fc-8e43-513b490e830b-kube-api-access-4gjfm\") pod \"cluster-samples-operator-665b6dd947-w7k95\" (UID: \"cbf25816-bff6-42fc-8e43-513b490e830b\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w7k95" Jan 25 00:08:48 crc kubenswrapper[4985]: W0125 00:08:48.259961 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod234cec4e_fc7e_4a34_b638_f1cc49fb2299.slice/crio-af8dac16149ad7a929e8951bc49acdbb20c65daf1c79556f97ef2a54c3a5ebcf WatchSource:0}: Error finding container af8dac16149ad7a929e8951bc49acdbb20c65daf1c79556f97ef2a54c3a5ebcf: Status 404 returned error can't find the container with id af8dac16149ad7a929e8951bc49acdbb20c65daf1c79556f97ef2a54c3a5ebcf Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.268818 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gp85q" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.269477 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22qr6\" (UniqueName: \"kubernetes.io/projected/5dd173ed-ab63-4006-9e39-2f4abf301a8e-kube-api-access-22qr6\") pod \"console-f9d7485db-54vvw\" (UID: \"5dd173ed-ab63-4006-9e39-2f4abf301a8e\") " pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.286454 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w7k95" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.286555 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnhmp\" (UniqueName: \"kubernetes.io/projected/b82d04cc-00d3-43dc-8317-dacb594c8b61-kube-api-access-pnhmp\") pod \"migrator-59844c95c7-bfzz6\" (UID: \"b82d04cc-00d3-43dc-8317-dacb594c8b61\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bfzz6" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.307235 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nhr7l\" (UniqueName: \"kubernetes.io/projected/ae20ca57-847f-4344-9718-aa179543b4ae-kube-api-access-nhr7l\") pod \"downloads-7954f5f757-54sg5\" (UID: \"ae20ca57-847f-4344-9718-aa179543b4ae\") " pod="openshift-console/downloads-7954f5f757-54sg5" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.322538 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jzhrb"] Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.327352 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-7gxv5" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.331647 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nw6k9\" (UniqueName: \"kubernetes.io/projected/beb34140-c131-478f-94d6-c4b5433b58e9-kube-api-access-nw6k9\") pod \"machine-approver-56656f9798-znxjv\" (UID: \"beb34140-c131-478f-94d6-c4b5433b58e9\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-znxjv" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.332517 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.332969 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9k44n" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.336202 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-pruner-29488320-65m92"] Jan 25 00:08:48 crc kubenswrapper[4985]: W0125 00:08:48.341196 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podde88820c_7cff_4928_8f36_9ec785accadc.slice/crio-0f3394fb1f8480ca4c7eba546128da266ae07cbf1507b1ac693456bde9e2046f WatchSource:0}: Error finding container 0f3394fb1f8480ca4c7eba546128da266ae07cbf1507b1ac693456bde9e2046f: Status 404 returned error can't find the container with id 0f3394fb1f8480ca4c7eba546128da266ae07cbf1507b1ac693456bde9e2046f Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.344923 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-bnmp5" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.350150 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhtl5\" (UniqueName: \"kubernetes.io/projected/25a97396-e69d-40d6-8734-95b5aaec338f-kube-api-access-lhtl5\") pod \"machine-config-operator-74547568cd-8ph92\" (UID: \"25a97396-e69d-40d6-8734-95b5aaec338f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-8ph92" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.357258 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-8ph92" Jan 25 00:08:48 crc kubenswrapper[4985]: W0125 00:08:48.361039 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcdc7acca_bf54_44d2_986b_10ecfb1a0abd.slice/crio-f58ada542a2a644a947eea731c093ae4b6b74c8d5ee380329ce5e1eeaee4853f WatchSource:0}: Error finding container f58ada542a2a644a947eea731c093ae4b6b74c8d5ee380329ce5e1eeaee4853f: Status 404 returned error can't find the container with id f58ada542a2a644a947eea731c093ae4b6b74c8d5ee380329ce5e1eeaee4853f Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.370343 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sqjg9\" (UniqueName: \"kubernetes.io/projected/b1f335db-7e31-44a9-b113-bb546349caa7-kube-api-access-sqjg9\") pod \"etcd-operator-b45778765-g229p\" (UID: \"b1f335db-7e31-44a9-b113-bb546349caa7\") " pod="openshift-etcd-operator/etcd-operator-b45778765-g229p" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.380564 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-4vgrr"] Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.384661 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tqj6b\" (UniqueName: \"kubernetes.io/projected/47fe0371-d4d4-40e2-905f-5e26db186cbe-kube-api-access-tqj6b\") pod \"catalog-operator-68c6474976-2dflz\" (UID: \"47fe0371-d4d4-40e2-905f-5e26db186cbe\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2dflz" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.390620 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-sffms" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.409753 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vhfc\" (UniqueName: \"kubernetes.io/projected/72c63de9-5d4f-4037-b70e-11ddf9a4904c-kube-api-access-4vhfc\") pod \"control-plane-machine-set-operator-78cbb6b69f-7tgz4\" (UID: \"72c63de9-5d4f-4037-b70e-11ddf9a4904c\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7tgz4" Jan 25 00:08:48 crc kubenswrapper[4985]: W0125 00:08:48.413809 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb56b49d8_11f3_49bc_bad7_d24bd00f0589.slice/crio-75e1485168a7bd505be89120802c4cddf0782ffb1a7d468a7b42d78ee0550ea5 WatchSource:0}: Error finding container 75e1485168a7bd505be89120802c4cddf0782ffb1a7d468a7b42d78ee0550ea5: Status 404 returned error can't find the container with id 75e1485168a7bd505be89120802c4cddf0782ffb1a7d468a7b42d78ee0550ea5 Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.427421 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2dflz" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.434743 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c5zwc\" (UniqueName: \"kubernetes.io/projected/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-kube-api-access-c5zwc\") pod \"oauth-openshift-558db77b4-29vvw\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.439743 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bfzz6" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.444949 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g9xw9" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.450234 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7x89\" (UniqueName: \"kubernetes.io/projected/d46a176e-d2cd-41cc-8420-37762bc47cd3-kube-api-access-z7x89\") pod \"ingress-operator-5b745b69d9-mp62x\" (UID: \"d46a176e-d2cd-41cc-8420-37762bc47cd3\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mp62x" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.466353 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-54sg5" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.467957 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78"] Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.468590 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d46a176e-d2cd-41cc-8420-37762bc47cd3-bound-sa-token\") pod \"ingress-operator-5b745b69d9-mp62x\" (UID: \"d46a176e-d2cd-41cc-8420-37762bc47cd3\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mp62x" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.472782 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-znxjv" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.487397 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.487521 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fchn\" (UniqueName: \"kubernetes.io/projected/4c2471c6-f9fd-439f-a0cf-1e4e166ed30b-kube-api-access-5fchn\") pod \"apiserver-76f77b778f-ctsj5\" (UID: \"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b\") " pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.500365 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-g229p" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.507399 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brg6q\" (UniqueName: \"kubernetes.io/projected/677a7eeb-960f-4771-bd2f-9fedef723ffd-kube-api-access-brg6q\") pod \"collect-profiles-29488320-8l78v\" (UID: \"677a7eeb-960f-4771-bd2f-9fedef723ffd\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488320-8l78v" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.513846 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.523888 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zdqwt\" (UniqueName: \"kubernetes.io/projected/9466dc5f-afcc-4586-bb92-cc23f5e64e77-kube-api-access-zdqwt\") pod \"kube-storage-version-migrator-operator-b67b599dd-n9j92\" (UID: \"9466dc5f-afcc-4586-bb92-cc23f5e64e77\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9j92" Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.524907 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gp85q"] Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.536608 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-dccnf"] Jan 25 00:08:48 crc kubenswrapper[4985]: I0125 00:08:48.608839 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jdgg6"] Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.510509 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.513840 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29488320-8l78v" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.514165 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mp62x" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.514853 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:49 crc kubenswrapper[4985]: E0125 00:08:49.515471 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:50.015454176 +0000 UTC m=+140.047390449 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.516274 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9j92" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.516527 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7tgz4" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.524065 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5j4gc"] Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.528501 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fwpcj"] Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.529576 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29488320-65m92" event={"ID":"cdc7acca-bf54-44d2-986b-10ecfb1a0abd","Type":"ContainerStarted","Data":"f58ada542a2a644a947eea731c093ae4b6b74c8d5ee380329ce5e1eeaee4853f"} Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.531910 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-8ph92"] Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.532803 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-7gxv5"] Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.535299 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w7k95"] Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.536964 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9k44n"] Jan 25 00:08:49 crc kubenswrapper[4985]: W0125 00:08:49.538421 4985 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4ee6bec9_a188_48bb_b49b_eeae08e55158.slice/crio-c53bec5dc8d2b9980297b234a814c13e7aaadbfe84c6a1500e82968a9d2e2c07 WatchSource:0}: Error finding container c53bec5dc8d2b9980297b234a814c13e7aaadbfe84c6a1500e82968a9d2e2c07: Status 404 returned error can't find the container with id c53bec5dc8d2b9980297b234a814c13e7aaadbfe84c6a1500e82968a9d2e2c07 Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.539817 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" event={"ID":"234cec4e-fc7e-4a34-b638-f1cc49fb2299","Type":"ContainerStarted","Data":"af8dac16149ad7a929e8951bc49acdbb20c65daf1c79556f97ef2a54c3a5ebcf"} Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.542800 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-bnmp5"] Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.549144 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jzhrb" event={"ID":"de88820c-7cff-4928-8f36-9ec785accadc","Type":"ContainerStarted","Data":"0f3394fb1f8480ca4c7eba546128da266ae07cbf1507b1ac693456bde9e2046f"} Jan 25 00:08:49 crc kubenswrapper[4985]: W0125 00:08:49.559547 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod585479e7_a937_42f2_9802_2117e25c68c1.slice/crio-69ea84b5d83abce7c27751df77b823ea683e9c27b8df45f925377986a12ba57d WatchSource:0}: Error finding container 69ea84b5d83abce7c27751df77b823ea683e9c27b8df45f925377986a12ba57d: Status 404 returned error can't find the container with id 69ea84b5d83abce7c27751df77b823ea683e9c27b8df45f925377986a12ba57d Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.560084 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" event={"ID":"b56b49d8-11f3-49bc-bad7-d24bd00f0589","Type":"ContainerStarted","Data":"75e1485168a7bd505be89120802c4cddf0782ffb1a7d468a7b42d78ee0550ea5"} Jan 25 00:08:49 crc kubenswrapper[4985]: W0125 00:08:49.570772 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode1ea9185_aa51_4b82_98ed_b2f028d291b2.slice/crio-1d618d640e058c479e8de5ed06522c0b9820ef50210f727368cb3b22b268234c WatchSource:0}: Error finding container 1d618d640e058c479e8de5ed06522c0b9820ef50210f727368cb3b22b268234c: Status 404 returned error can't find the container with id 1d618d640e058c479e8de5ed06522c0b9820ef50210f727368cb3b22b268234c Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.617386 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.617814 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e3a56e76-0d21-4576-91ec-87099bd8f5e9-ca-trust-extracted\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:49 crc 
kubenswrapper[4985]: I0125 00:08:49.617864 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1a3ef24c-1ed4-461f-ae53-9dbd9a150fa3-metrics-tls\") pod \"dns-operator-744455d44c-mpnhl\" (UID: \"1a3ef24c-1ed4-461f-ae53-9dbd9a150fa3\") " pod="openshift-dns-operator/dns-operator-744455d44c-mpnhl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.617893 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ec54eb4a-a089-4c2a-9049-00a412be5916-serving-cert\") pod \"authentication-operator-69f744f599-t7bhx\" (UID: \"ec54eb4a-a089-4c2a-9049-00a412be5916\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t7bhx" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.617935 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2bfd38dc-27e5-4906-a593-ea58e49340b8-metrics-certs\") pod \"router-default-5444994796-5z29b\" (UID: \"2bfd38dc-27e5-4906-a593-ea58e49340b8\") " pod="openshift-ingress/router-default-5444994796-5z29b" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.617954 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vz787\" (UniqueName: \"kubernetes.io/projected/226aaa66-f8e9-42a6-b938-34b14f322d48-kube-api-access-vz787\") pod \"package-server-manager-789f6589d5-cjjtf\" (UID: \"226aaa66-f8e9-42a6-b938-34b14f322d48\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cjjtf" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.618023 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e3a56e76-0d21-4576-91ec-87099bd8f5e9-bound-sa-token\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.618038 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4dqg\" (UniqueName: \"kubernetes.io/projected/a0cac91e-216e-424e-b665-cf28717932b0-kube-api-access-v4dqg\") pod \"service-ca-9c57cc56f-pnk89\" (UID: \"a0cac91e-216e-424e-b665-cf28717932b0\") " pod="openshift-service-ca/service-ca-9c57cc56f-pnk89" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.618056 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/22db9bf4-8af9-460e-ae6b-4874fe32053b-apiservice-cert\") pod \"packageserver-d55dfcdfc-bwg9c\" (UID: \"22db9bf4-8af9-460e-ae6b-4874fe32053b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bwg9c" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.618128 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6ea2c96-caaa-4e9f-816a-fe2f63dedd65-config\") pod \"route-controller-manager-6576b87f9c-92txc\" (UID: \"c6ea2c96-caaa-4e9f-816a-fe2f63dedd65\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.618147 4985 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e3a56e76-0d21-4576-91ec-87099bd8f5e9-registry-certificates\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.618166 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/2bfd38dc-27e5-4906-a593-ea58e49340b8-default-certificate\") pod \"router-default-5444994796-5z29b\" (UID: \"2bfd38dc-27e5-4906-a593-ea58e49340b8\") " pod="openshift-ingress/router-default-5444994796-5z29b" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.618180 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6ea2c96-caaa-4e9f-816a-fe2f63dedd65-serving-cert\") pod \"route-controller-manager-6576b87f9c-92txc\" (UID: \"c6ea2c96-caaa-4e9f-816a-fe2f63dedd65\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.618229 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d260c75-2257-4b95-982c-630a20b9d157-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-kz648\" (UID: \"5d260c75-2257-4b95-982c-630a20b9d157\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kz648" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.618248 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/22db9bf4-8af9-460e-ae6b-4874fe32053b-tmpfs\") pod \"packageserver-d55dfcdfc-bwg9c\" (UID: \"22db9bf4-8af9-460e-ae6b-4874fe32053b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bwg9c" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.618288 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s726s\" (UniqueName: \"kubernetes.io/projected/2bfd38dc-27e5-4906-a593-ea58e49340b8-kube-api-access-s726s\") pod \"router-default-5444994796-5z29b\" (UID: \"2bfd38dc-27e5-4906-a593-ea58e49340b8\") " pod="openshift-ingress/router-default-5444994796-5z29b" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.618304 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qbvk\" (UniqueName: \"kubernetes.io/projected/c6ea2c96-caaa-4e9f-816a-fe2f63dedd65-kube-api-access-8qbvk\") pod \"route-controller-manager-6576b87f9c-92txc\" (UID: \"c6ea2c96-caaa-4e9f-816a-fe2f63dedd65\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.618321 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jn55d\" (UniqueName: \"kubernetes.io/projected/6f8695ce-bb8b-4288-bf4d-bd30db7eeb8c-kube-api-access-jn55d\") pod \"multus-admission-controller-857f4d67dd-zp4dh\" (UID: \"6f8695ce-bb8b-4288-bf4d-bd30db7eeb8c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-zp4dh" Jan 25 00:08:49 crc 
kubenswrapper[4985]: I0125 00:08:49.618339 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/acd1a373-e926-421c-88a1-4f46fd6dcdb8-proxy-tls\") pod \"machine-config-controller-84d6567774-764mr\" (UID: \"acd1a373-e926-421c-88a1-4f46fd6dcdb8\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-764mr" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.618357 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/226aaa66-f8e9-42a6-b938-34b14f322d48-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-cjjtf\" (UID: \"226aaa66-f8e9-42a6-b938-34b14f322d48\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cjjtf" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.618398 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e3a56e76-0d21-4576-91ec-87099bd8f5e9-trusted-ca\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.618430 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec54eb4a-a089-4c2a-9049-00a412be5916-config\") pod \"authentication-operator-69f744f599-t7bhx\" (UID: \"ec54eb4a-a089-4c2a-9049-00a412be5916\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t7bhx" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.618447 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5d260c75-2257-4b95-982c-630a20b9d157-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-kz648\" (UID: \"5d260c75-2257-4b95-982c-630a20b9d157\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kz648" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.618501 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e3a56e76-0d21-4576-91ec-87099bd8f5e9-registry-tls\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.618535 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xz66m\" (UniqueName: \"kubernetes.io/projected/e3a56e76-0d21-4576-91ec-87099bd8f5e9-kube-api-access-xz66m\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:49 crc kubenswrapper[4985]: E0125 00:08:49.618585 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:50.118561483 +0000 UTC m=+140.150497816 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.618988 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-db4mk\" (UniqueName: \"kubernetes.io/projected/1a3ef24c-1ed4-461f-ae53-9dbd9a150fa3-kube-api-access-db4mk\") pod \"dns-operator-744455d44c-mpnhl\" (UID: \"1a3ef24c-1ed4-461f-ae53-9dbd9a150fa3\") " pod="openshift-dns-operator/dns-operator-744455d44c-mpnhl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.619007 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c6ea2c96-caaa-4e9f-816a-fe2f63dedd65-client-ca\") pod \"route-controller-manager-6576b87f9c-92txc\" (UID: \"c6ea2c96-caaa-4e9f-816a-fe2f63dedd65\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.619030 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.619048 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/acd1a373-e926-421c-88a1-4f46fd6dcdb8-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-764mr\" (UID: \"acd1a373-e926-421c-88a1-4f46fd6dcdb8\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-764mr" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.619063 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ec54eb4a-a089-4c2a-9049-00a412be5916-service-ca-bundle\") pod \"authentication-operator-69f744f599-t7bhx\" (UID: \"ec54eb4a-a089-4c2a-9049-00a412be5916\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t7bhx" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.619077 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/a0cac91e-216e-424e-b665-cf28717932b0-signing-key\") pod \"service-ca-9c57cc56f-pnk89\" (UID: \"a0cac91e-216e-424e-b665-cf28717932b0\") " pod="openshift-service-ca/service-ca-9c57cc56f-pnk89" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.619159 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/2bfd38dc-27e5-4906-a593-ea58e49340b8-stats-auth\") pod \"router-default-5444994796-5z29b\" (UID: \"2bfd38dc-27e5-4906-a593-ea58e49340b8\") " pod="openshift-ingress/router-default-5444994796-5z29b" Jan 25 
00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.619176 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/22db9bf4-8af9-460e-ae6b-4874fe32053b-webhook-cert\") pod \"packageserver-d55dfcdfc-bwg9c\" (UID: \"22db9bf4-8af9-460e-ae6b-4874fe32053b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bwg9c" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.619192 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ec54eb4a-a089-4c2a-9049-00a412be5916-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-t7bhx\" (UID: \"ec54eb4a-a089-4c2a-9049-00a412be5916\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t7bhx" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.619260 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d260c75-2257-4b95-982c-630a20b9d157-config\") pod \"kube-controller-manager-operator-78b949d7b-kz648\" (UID: \"5d260c75-2257-4b95-982c-630a20b9d157\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kz648" Jan 25 00:08:49 crc kubenswrapper[4985]: E0125 00:08:49.620019 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:50.120009011 +0000 UTC m=+140.151945284 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.620195 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/a0cac91e-216e-424e-b665-cf28717932b0-signing-cabundle\") pod \"service-ca-9c57cc56f-pnk89\" (UID: \"a0cac91e-216e-424e-b665-cf28717932b0\") " pod="openshift-service-ca/service-ca-9c57cc56f-pnk89" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.620235 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgtwf\" (UniqueName: \"kubernetes.io/projected/acd1a373-e926-421c-88a1-4f46fd6dcdb8-kube-api-access-rgtwf\") pod \"machine-config-controller-84d6567774-764mr\" (UID: \"acd1a373-e926-421c-88a1-4f46fd6dcdb8\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-764mr" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.620256 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6f8695ce-bb8b-4288-bf4d-bd30db7eeb8c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-zp4dh\" (UID: \"6f8695ce-bb8b-4288-bf4d-bd30db7eeb8c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-zp4dh" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 
00:08:49.620304 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2bfd38dc-27e5-4906-a593-ea58e49340b8-service-ca-bundle\") pod \"router-default-5444994796-5z29b\" (UID: \"2bfd38dc-27e5-4906-a593-ea58e49340b8\") " pod="openshift-ingress/router-default-5444994796-5z29b" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.620603 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9cvf\" (UniqueName: \"kubernetes.io/projected/22db9bf4-8af9-460e-ae6b-4874fe32053b-kube-api-access-q9cvf\") pod \"packageserver-d55dfcdfc-bwg9c\" (UID: \"22db9bf4-8af9-460e-ae6b-4874fe32053b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bwg9c" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.620763 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e3a56e76-0d21-4576-91ec-87099bd8f5e9-installation-pull-secrets\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.620782 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dzcvv\" (UniqueName: \"kubernetes.io/projected/ec54eb4a-a089-4c2a-9049-00a412be5916-kube-api-access-dzcvv\") pod \"authentication-operator-69f744f599-t7bhx\" (UID: \"ec54eb4a-a089-4c2a-9049-00a412be5916\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t7bhx" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.721345 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:49 crc kubenswrapper[4985]: E0125 00:08:49.721488 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:50.221469615 +0000 UTC m=+140.253405888 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.721569 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/acd1a373-e926-421c-88a1-4f46fd6dcdb8-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-764mr\" (UID: \"acd1a373-e926-421c-88a1-4f46fd6dcdb8\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-764mr" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.721593 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ec54eb4a-a089-4c2a-9049-00a412be5916-service-ca-bundle\") pod \"authentication-operator-69f744f599-t7bhx\" (UID: \"ec54eb4a-a089-4c2a-9049-00a412be5916\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t7bhx" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.721609 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/a0cac91e-216e-424e-b665-cf28717932b0-signing-key\") pod \"service-ca-9c57cc56f-pnk89\" (UID: \"a0cac91e-216e-424e-b665-cf28717932b0\") " pod="openshift-service-ca/service-ca-9c57cc56f-pnk89" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.721633 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/2bfd38dc-27e5-4906-a593-ea58e49340b8-stats-auth\") pod \"router-default-5444994796-5z29b\" (UID: \"2bfd38dc-27e5-4906-a593-ea58e49340b8\") " pod="openshift-ingress/router-default-5444994796-5z29b" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.721650 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/22db9bf4-8af9-460e-ae6b-4874fe32053b-webhook-cert\") pod \"packageserver-d55dfcdfc-bwg9c\" (UID: \"22db9bf4-8af9-460e-ae6b-4874fe32053b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bwg9c" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.721666 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ec54eb4a-a089-4c2a-9049-00a412be5916-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-t7bhx\" (UID: \"ec54eb4a-a089-4c2a-9049-00a412be5916\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t7bhx" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.721689 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/e27fe515-3c73-4be3-aaf4-34ed0f73992a-certs\") pod \"machine-config-server-5j99m\" (UID: \"e27fe515-3c73-4be3-aaf4-34ed0f73992a\") " pod="openshift-machine-config-operator/machine-config-server-5j99m" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.721707 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/5d260c75-2257-4b95-982c-630a20b9d157-config\") pod \"kube-controller-manager-operator-78b949d7b-kz648\" (UID: \"5d260c75-2257-4b95-982c-630a20b9d157\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kz648" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.721767 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c96kd\" (UniqueName: \"kubernetes.io/projected/e27fe515-3c73-4be3-aaf4-34ed0f73992a-kube-api-access-c96kd\") pod \"machine-config-server-5j99m\" (UID: \"e27fe515-3c73-4be3-aaf4-34ed0f73992a\") " pod="openshift-machine-config-operator/machine-config-server-5j99m" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.721794 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/a0cac91e-216e-424e-b665-cf28717932b0-signing-cabundle\") pod \"service-ca-9c57cc56f-pnk89\" (UID: \"a0cac91e-216e-424e-b665-cf28717932b0\") " pod="openshift-service-ca/service-ca-9c57cc56f-pnk89" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.721823 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgtwf\" (UniqueName: \"kubernetes.io/projected/acd1a373-e926-421c-88a1-4f46fd6dcdb8-kube-api-access-rgtwf\") pod \"machine-config-controller-84d6567774-764mr\" (UID: \"acd1a373-e926-421c-88a1-4f46fd6dcdb8\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-764mr" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.721841 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6f8695ce-bb8b-4288-bf4d-bd30db7eeb8c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-zp4dh\" (UID: \"6f8695ce-bb8b-4288-bf4d-bd30db7eeb8c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-zp4dh" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.721858 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2bfd38dc-27e5-4906-a593-ea58e49340b8-service-ca-bundle\") pod \"router-default-5444994796-5z29b\" (UID: \"2bfd38dc-27e5-4906-a593-ea58e49340b8\") " pod="openshift-ingress/router-default-5444994796-5z29b" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.721892 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-schj9\" (UniqueName: \"kubernetes.io/projected/ca96a556-37cd-4d51-bc17-66eb3547c482-kube-api-access-schj9\") pod \"ingress-canary-g4x9b\" (UID: \"ca96a556-37cd-4d51-bc17-66eb3547c482\") " pod="openshift-ingress-canary/ingress-canary-g4x9b" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.721915 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ca96a556-37cd-4d51-bc17-66eb3547c482-cert\") pod \"ingress-canary-g4x9b\" (UID: \"ca96a556-37cd-4d51-bc17-66eb3547c482\") " pod="openshift-ingress-canary/ingress-canary-g4x9b" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.721977 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9cvf\" (UniqueName: \"kubernetes.io/projected/22db9bf4-8af9-460e-ae6b-4874fe32053b-kube-api-access-q9cvf\") pod \"packageserver-d55dfcdfc-bwg9c\" (UID: 
\"22db9bf4-8af9-460e-ae6b-4874fe32053b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bwg9c" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.721998 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e3a56e76-0d21-4576-91ec-87099bd8f5e9-installation-pull-secrets\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722017 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dzcvv\" (UniqueName: \"kubernetes.io/projected/ec54eb4a-a089-4c2a-9049-00a412be5916-kube-api-access-dzcvv\") pod \"authentication-operator-69f744f599-t7bhx\" (UID: \"ec54eb4a-a089-4c2a-9049-00a412be5916\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t7bhx" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722067 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e3a56e76-0d21-4576-91ec-87099bd8f5e9-ca-trust-extracted\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722097 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1a3ef24c-1ed4-461f-ae53-9dbd9a150fa3-metrics-tls\") pod \"dns-operator-744455d44c-mpnhl\" (UID: \"1a3ef24c-1ed4-461f-ae53-9dbd9a150fa3\") " pod="openshift-dns-operator/dns-operator-744455d44c-mpnhl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722158 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/e27fe515-3c73-4be3-aaf4-34ed0f73992a-node-bootstrap-token\") pod \"machine-config-server-5j99m\" (UID: \"e27fe515-3c73-4be3-aaf4-34ed0f73992a\") " pod="openshift-machine-config-operator/machine-config-server-5j99m" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722191 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ec54eb4a-a089-4c2a-9049-00a412be5916-serving-cert\") pod \"authentication-operator-69f744f599-t7bhx\" (UID: \"ec54eb4a-a089-4c2a-9049-00a412be5916\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t7bhx" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722206 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2bfd38dc-27e5-4906-a593-ea58e49340b8-metrics-certs\") pod \"router-default-5444994796-5z29b\" (UID: \"2bfd38dc-27e5-4906-a593-ea58e49340b8\") " pod="openshift-ingress/router-default-5444994796-5z29b" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722241 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vz787\" (UniqueName: \"kubernetes.io/projected/226aaa66-f8e9-42a6-b938-34b14f322d48-kube-api-access-vz787\") pod \"package-server-manager-789f6589d5-cjjtf\" (UID: \"226aaa66-f8e9-42a6-b938-34b14f322d48\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cjjtf" Jan 25 00:08:49 crc 
kubenswrapper[4985]: I0125 00:08:49.722299 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e3a56e76-0d21-4576-91ec-87099bd8f5e9-bound-sa-token\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722332 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4dqg\" (UniqueName: \"kubernetes.io/projected/a0cac91e-216e-424e-b665-cf28717932b0-kube-api-access-v4dqg\") pod \"service-ca-9c57cc56f-pnk89\" (UID: \"a0cac91e-216e-424e-b665-cf28717932b0\") " pod="openshift-service-ca/service-ca-9c57cc56f-pnk89" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722357 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/22db9bf4-8af9-460e-ae6b-4874fe32053b-apiservice-cert\") pod \"packageserver-d55dfcdfc-bwg9c\" (UID: \"22db9bf4-8af9-460e-ae6b-4874fe32053b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bwg9c" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722383 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6ea2c96-caaa-4e9f-816a-fe2f63dedd65-config\") pod \"route-controller-manager-6576b87f9c-92txc\" (UID: \"c6ea2c96-caaa-4e9f-816a-fe2f63dedd65\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722425 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/07ecbb8f-d2fb-4933-a4b1-f411717ee5f3-config-volume\") pod \"dns-default-dflg7\" (UID: \"07ecbb8f-d2fb-4933-a4b1-f411717ee5f3\") " pod="openshift-dns/dns-default-dflg7" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722442 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e3a56e76-0d21-4576-91ec-87099bd8f5e9-registry-certificates\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722457 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/2bfd38dc-27e5-4906-a593-ea58e49340b8-default-certificate\") pod \"router-default-5444994796-5z29b\" (UID: \"2bfd38dc-27e5-4906-a593-ea58e49340b8\") " pod="openshift-ingress/router-default-5444994796-5z29b" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722473 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6ea2c96-caaa-4e9f-816a-fe2f63dedd65-serving-cert\") pod \"route-controller-manager-6576b87f9c-92txc\" (UID: \"c6ea2c96-caaa-4e9f-816a-fe2f63dedd65\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722487 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d260c75-2257-4b95-982c-630a20b9d157-serving-cert\") pod 
\"kube-controller-manager-operator-78b949d7b-kz648\" (UID: \"5d260c75-2257-4b95-982c-630a20b9d157\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kz648" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722503 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/22db9bf4-8af9-460e-ae6b-4874fe32053b-tmpfs\") pod \"packageserver-d55dfcdfc-bwg9c\" (UID: \"22db9bf4-8af9-460e-ae6b-4874fe32053b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bwg9c" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722545 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cwlmk\" (UniqueName: \"kubernetes.io/projected/07ecbb8f-d2fb-4933-a4b1-f411717ee5f3-kube-api-access-cwlmk\") pod \"dns-default-dflg7\" (UID: \"07ecbb8f-d2fb-4933-a4b1-f411717ee5f3\") " pod="openshift-dns/dns-default-dflg7" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722577 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s726s\" (UniqueName: \"kubernetes.io/projected/2bfd38dc-27e5-4906-a593-ea58e49340b8-kube-api-access-s726s\") pod \"router-default-5444994796-5z29b\" (UID: \"2bfd38dc-27e5-4906-a593-ea58e49340b8\") " pod="openshift-ingress/router-default-5444994796-5z29b" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722596 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qbvk\" (UniqueName: \"kubernetes.io/projected/c6ea2c96-caaa-4e9f-816a-fe2f63dedd65-kube-api-access-8qbvk\") pod \"route-controller-manager-6576b87f9c-92txc\" (UID: \"c6ea2c96-caaa-4e9f-816a-fe2f63dedd65\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722644 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jn55d\" (UniqueName: \"kubernetes.io/projected/6f8695ce-bb8b-4288-bf4d-bd30db7eeb8c-kube-api-access-jn55d\") pod \"multus-admission-controller-857f4d67dd-zp4dh\" (UID: \"6f8695ce-bb8b-4288-bf4d-bd30db7eeb8c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-zp4dh" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722704 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/acd1a373-e926-421c-88a1-4f46fd6dcdb8-proxy-tls\") pod \"machine-config-controller-84d6567774-764mr\" (UID: \"acd1a373-e926-421c-88a1-4f46fd6dcdb8\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-764mr" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722720 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/226aaa66-f8e9-42a6-b938-34b14f322d48-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-cjjtf\" (UID: \"226aaa66-f8e9-42a6-b938-34b14f322d48\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cjjtf" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722767 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e3a56e76-0d21-4576-91ec-87099bd8f5e9-trusted-ca\") pod \"image-registry-697d97f7c8-l8stl\" (UID: 
\"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722802 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec54eb4a-a089-4c2a-9049-00a412be5916-config\") pod \"authentication-operator-69f744f599-t7bhx\" (UID: \"ec54eb4a-a089-4c2a-9049-00a412be5916\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t7bhx" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722818 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/07ecbb8f-d2fb-4933-a4b1-f411717ee5f3-metrics-tls\") pod \"dns-default-dflg7\" (UID: \"07ecbb8f-d2fb-4933-a4b1-f411717ee5f3\") " pod="openshift-dns/dns-default-dflg7" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722852 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5d260c75-2257-4b95-982c-630a20b9d157-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-kz648\" (UID: \"5d260c75-2257-4b95-982c-630a20b9d157\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kz648" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722906 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e3a56e76-0d21-4576-91ec-87099bd8f5e9-registry-tls\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722946 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xz66m\" (UniqueName: \"kubernetes.io/projected/e3a56e76-0d21-4576-91ec-87099bd8f5e9-kube-api-access-xz66m\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.722963 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-db4mk\" (UniqueName: \"kubernetes.io/projected/1a3ef24c-1ed4-461f-ae53-9dbd9a150fa3-kube-api-access-db4mk\") pod \"dns-operator-744455d44c-mpnhl\" (UID: \"1a3ef24c-1ed4-461f-ae53-9dbd9a150fa3\") " pod="openshift-dns-operator/dns-operator-744455d44c-mpnhl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.723005 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c6ea2c96-caaa-4e9f-816a-fe2f63dedd65-client-ca\") pod \"route-controller-manager-6576b87f9c-92txc\" (UID: \"c6ea2c96-caaa-4e9f-816a-fe2f63dedd65\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.723036 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 
00:08:49.723223 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ec54eb4a-a089-4c2a-9049-00a412be5916-service-ca-bundle\") pod \"authentication-operator-69f744f599-t7bhx\" (UID: \"ec54eb4a-a089-4c2a-9049-00a412be5916\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t7bhx" Jan 25 00:08:49 crc kubenswrapper[4985]: E0125 00:08:49.723283 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:50.223275111 +0000 UTC m=+140.255211384 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.723733 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/22db9bf4-8af9-460e-ae6b-4874fe32053b-tmpfs\") pod \"packageserver-d55dfcdfc-bwg9c\" (UID: \"22db9bf4-8af9-460e-ae6b-4874fe32053b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bwg9c" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.724995 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2bfd38dc-27e5-4906-a593-ea58e49340b8-service-ca-bundle\") pod \"router-default-5444994796-5z29b\" (UID: \"2bfd38dc-27e5-4906-a593-ea58e49340b8\") " pod="openshift-ingress/router-default-5444994796-5z29b" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.728272 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e3a56e76-0d21-4576-91ec-87099bd8f5e9-installation-pull-secrets\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.729417 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e3a56e76-0d21-4576-91ec-87099bd8f5e9-ca-trust-extracted\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.732832 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ec54eb4a-a089-4c2a-9049-00a412be5916-config\") pod \"authentication-operator-69f744f599-t7bhx\" (UID: \"ec54eb4a-a089-4c2a-9049-00a412be5916\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t7bhx" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.734411 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/ec54eb4a-a089-4c2a-9049-00a412be5916-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-t7bhx\" (UID: 
\"ec54eb4a-a089-4c2a-9049-00a412be5916\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t7bhx" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.734567 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/1a3ef24c-1ed4-461f-ae53-9dbd9a150fa3-metrics-tls\") pod \"dns-operator-744455d44c-mpnhl\" (UID: \"1a3ef24c-1ed4-461f-ae53-9dbd9a150fa3\") " pod="openshift-dns-operator/dns-operator-744455d44c-mpnhl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.734672 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e3a56e76-0d21-4576-91ec-87099bd8f5e9-registry-tls\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.736603 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c6ea2c96-caaa-4e9f-816a-fe2f63dedd65-client-ca\") pod \"route-controller-manager-6576b87f9c-92txc\" (UID: \"c6ea2c96-caaa-4e9f-816a-fe2f63dedd65\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.737098 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/6f8695ce-bb8b-4288-bf4d-bd30db7eeb8c-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-zp4dh\" (UID: \"6f8695ce-bb8b-4288-bf4d-bd30db7eeb8c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-zp4dh" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.737598 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/22db9bf4-8af9-460e-ae6b-4874fe32053b-apiservice-cert\") pod \"packageserver-d55dfcdfc-bwg9c\" (UID: \"22db9bf4-8af9-460e-ae6b-4874fe32053b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bwg9c" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.737754 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/a0cac91e-216e-424e-b665-cf28717932b0-signing-cabundle\") pod \"service-ca-9c57cc56f-pnk89\" (UID: \"a0cac91e-216e-424e-b665-cf28717932b0\") " pod="openshift-service-ca/service-ca-9c57cc56f-pnk89" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.739654 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ec54eb4a-a089-4c2a-9049-00a412be5916-serving-cert\") pod \"authentication-operator-69f744f599-t7bhx\" (UID: \"ec54eb4a-a089-4c2a-9049-00a412be5916\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t7bhx" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.740898 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s726s\" (UniqueName: \"kubernetes.io/projected/2bfd38dc-27e5-4906-a593-ea58e49340b8-kube-api-access-s726s\") pod \"router-default-5444994796-5z29b\" (UID: \"2bfd38dc-27e5-4906-a593-ea58e49340b8\") " pod="openshift-ingress/router-default-5444994796-5z29b" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.741716 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: 
\"kubernetes.io/secret/2bfd38dc-27e5-4906-a593-ea58e49340b8-metrics-certs\") pod \"router-default-5444994796-5z29b\" (UID: \"2bfd38dc-27e5-4906-a593-ea58e49340b8\") " pod="openshift-ingress/router-default-5444994796-5z29b" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.743254 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6ea2c96-caaa-4e9f-816a-fe2f63dedd65-config\") pod \"route-controller-manager-6576b87f9c-92txc\" (UID: \"c6ea2c96-caaa-4e9f-816a-fe2f63dedd65\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.743732 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/2bfd38dc-27e5-4906-a593-ea58e49340b8-stats-auth\") pod \"router-default-5444994796-5z29b\" (UID: \"2bfd38dc-27e5-4906-a593-ea58e49340b8\") " pod="openshift-ingress/router-default-5444994796-5z29b" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.746396 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d260c75-2257-4b95-982c-630a20b9d157-config\") pod \"kube-controller-manager-operator-78b949d7b-kz648\" (UID: \"5d260c75-2257-4b95-982c-630a20b9d157\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kz648" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.746764 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6ea2c96-caaa-4e9f-816a-fe2f63dedd65-serving-cert\") pod \"route-controller-manager-6576b87f9c-92txc\" (UID: \"c6ea2c96-caaa-4e9f-816a-fe2f63dedd65\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.747138 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/a0cac91e-216e-424e-b665-cf28717932b0-signing-key\") pod \"service-ca-9c57cc56f-pnk89\" (UID: \"a0cac91e-216e-424e-b665-cf28717932b0\") " pod="openshift-service-ca/service-ca-9c57cc56f-pnk89" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.747302 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jn55d\" (UniqueName: \"kubernetes.io/projected/6f8695ce-bb8b-4288-bf4d-bd30db7eeb8c-kube-api-access-jn55d\") pod \"multus-admission-controller-857f4d67dd-zp4dh\" (UID: \"6f8695ce-bb8b-4288-bf4d-bd30db7eeb8c\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-zp4dh" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.747708 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/acd1a373-e926-421c-88a1-4f46fd6dcdb8-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-764mr\" (UID: \"acd1a373-e926-421c-88a1-4f46fd6dcdb8\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-764mr" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.748566 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/226aaa66-f8e9-42a6-b938-34b14f322d48-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-cjjtf\" (UID: \"226aaa66-f8e9-42a6-b938-34b14f322d48\") " 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cjjtf" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.750568 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/2bfd38dc-27e5-4906-a593-ea58e49340b8-default-certificate\") pod \"router-default-5444994796-5z29b\" (UID: \"2bfd38dc-27e5-4906-a593-ea58e49340b8\") " pod="openshift-ingress/router-default-5444994796-5z29b" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.750816 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/acd1a373-e926-421c-88a1-4f46fd6dcdb8-proxy-tls\") pod \"machine-config-controller-84d6567774-764mr\" (UID: \"acd1a373-e926-421c-88a1-4f46fd6dcdb8\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-764mr" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.751675 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dzcvv\" (UniqueName: \"kubernetes.io/projected/ec54eb4a-a089-4c2a-9049-00a412be5916-kube-api-access-dzcvv\") pod \"authentication-operator-69f744f599-t7bhx\" (UID: \"ec54eb4a-a089-4c2a-9049-00a412be5916\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-t7bhx" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.751880 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9cvf\" (UniqueName: \"kubernetes.io/projected/22db9bf4-8af9-460e-ae6b-4874fe32053b-kube-api-access-q9cvf\") pod \"packageserver-d55dfcdfc-bwg9c\" (UID: \"22db9bf4-8af9-460e-ae6b-4874fe32053b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bwg9c" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.756425 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e3a56e76-0d21-4576-91ec-87099bd8f5e9-registry-certificates\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.758208 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/22db9bf4-8af9-460e-ae6b-4874fe32053b-webhook-cert\") pod \"packageserver-d55dfcdfc-bwg9c\" (UID: \"22db9bf4-8af9-460e-ae6b-4874fe32053b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bwg9c" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.758668 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d260c75-2257-4b95-982c-630a20b9d157-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-kz648\" (UID: \"5d260c75-2257-4b95-982c-630a20b9d157\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kz648" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.758892 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xz66m\" (UniqueName: \"kubernetes.io/projected/e3a56e76-0d21-4576-91ec-87099bd8f5e9-kube-api-access-xz66m\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.759165 4985 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e3a56e76-0d21-4576-91ec-87099bd8f5e9-trusted-ca\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.761381 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4dqg\" (UniqueName: \"kubernetes.io/projected/a0cac91e-216e-424e-b665-cf28717932b0-kube-api-access-v4dqg\") pod \"service-ca-9c57cc56f-pnk89\" (UID: \"a0cac91e-216e-424e-b665-cf28717932b0\") " pod="openshift-service-ca/service-ca-9c57cc56f-pnk89" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.767526 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e3a56e76-0d21-4576-91ec-87099bd8f5e9-bound-sa-token\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.774627 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vz787\" (UniqueName: \"kubernetes.io/projected/226aaa66-f8e9-42a6-b938-34b14f322d48-kube-api-access-vz787\") pod \"package-server-manager-789f6589d5-cjjtf\" (UID: \"226aaa66-f8e9-42a6-b938-34b14f322d48\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cjjtf" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.774694 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qbvk\" (UniqueName: \"kubernetes.io/projected/c6ea2c96-caaa-4e9f-816a-fe2f63dedd65-kube-api-access-8qbvk\") pod \"route-controller-manager-6576b87f9c-92txc\" (UID: \"c6ea2c96-caaa-4e9f-816a-fe2f63dedd65\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.783052 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-zp4dh" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.783163 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-db4mk\" (UniqueName: \"kubernetes.io/projected/1a3ef24c-1ed4-461f-ae53-9dbd9a150fa3-kube-api-access-db4mk\") pod \"dns-operator-744455d44c-mpnhl\" (UID: \"1a3ef24c-1ed4-461f-ae53-9dbd9a150fa3\") " pod="openshift-dns-operator/dns-operator-744455d44c-mpnhl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.783635 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgtwf\" (UniqueName: \"kubernetes.io/projected/acd1a373-e926-421c-88a1-4f46fd6dcdb8-kube-api-access-rgtwf\") pod \"machine-config-controller-84d6567774-764mr\" (UID: \"acd1a373-e926-421c-88a1-4f46fd6dcdb8\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-764mr" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.784015 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/5d260c75-2257-4b95-982c-630a20b9d157-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-kz648\" (UID: \"5d260c75-2257-4b95-982c-630a20b9d157\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kz648" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.788914 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.795826 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kz648" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.819481 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cjjtf" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.824021 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.824194 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/e27fe515-3c73-4be3-aaf4-34ed0f73992a-certs\") pod \"machine-config-server-5j99m\" (UID: \"e27fe515-3c73-4be3-aaf4-34ed0f73992a\") " pod="openshift-machine-config-operator/machine-config-server-5j99m" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.824241 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c96kd\" (UniqueName: \"kubernetes.io/projected/e27fe515-3c73-4be3-aaf4-34ed0f73992a-kube-api-access-c96kd\") pod \"machine-config-server-5j99m\" (UID: \"e27fe515-3c73-4be3-aaf4-34ed0f73992a\") " pod="openshift-machine-config-operator/machine-config-server-5j99m" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.824278 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-schj9\" (UniqueName: \"kubernetes.io/projected/ca96a556-37cd-4d51-bc17-66eb3547c482-kube-api-access-schj9\") pod \"ingress-canary-g4x9b\" (UID: \"ca96a556-37cd-4d51-bc17-66eb3547c482\") " pod="openshift-ingress-canary/ingress-canary-g4x9b" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.824303 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ca96a556-37cd-4d51-bc17-66eb3547c482-cert\") pod \"ingress-canary-g4x9b\" (UID: \"ca96a556-37cd-4d51-bc17-66eb3547c482\") " pod="openshift-ingress-canary/ingress-canary-g4x9b" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.824341 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/e27fe515-3c73-4be3-aaf4-34ed0f73992a-node-bootstrap-token\") pod \"machine-config-server-5j99m\" (UID: \"e27fe515-3c73-4be3-aaf4-34ed0f73992a\") " pod="openshift-machine-config-operator/machine-config-server-5j99m" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.824386 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/07ecbb8f-d2fb-4933-a4b1-f411717ee5f3-config-volume\") pod \"dns-default-dflg7\" (UID: \"07ecbb8f-d2fb-4933-a4b1-f411717ee5f3\") " pod="openshift-dns/dns-default-dflg7" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.824413 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cwlmk\" (UniqueName: \"kubernetes.io/projected/07ecbb8f-d2fb-4933-a4b1-f411717ee5f3-kube-api-access-cwlmk\") pod \"dns-default-dflg7\" (UID: \"07ecbb8f-d2fb-4933-a4b1-f411717ee5f3\") " pod="openshift-dns/dns-default-dflg7" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.824459 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/07ecbb8f-d2fb-4933-a4b1-f411717ee5f3-metrics-tls\") pod \"dns-default-dflg7\" (UID: 
\"07ecbb8f-d2fb-4933-a4b1-f411717ee5f3\") " pod="openshift-dns/dns-default-dflg7" Jan 25 00:08:49 crc kubenswrapper[4985]: E0125 00:08:49.825769 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:50.325740822 +0000 UTC m=+140.357677105 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.830349 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/e27fe515-3c73-4be3-aaf4-34ed0f73992a-certs\") pod \"machine-config-server-5j99m\" (UID: \"e27fe515-3c73-4be3-aaf4-34ed0f73992a\") " pod="openshift-machine-config-operator/machine-config-server-5j99m" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.831546 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/07ecbb8f-d2fb-4933-a4b1-f411717ee5f3-config-volume\") pod \"dns-default-dflg7\" (UID: \"07ecbb8f-d2fb-4933-a4b1-f411717ee5f3\") " pod="openshift-dns/dns-default-dflg7" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.832654 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/e27fe515-3c73-4be3-aaf4-34ed0f73992a-node-bootstrap-token\") pod \"machine-config-server-5j99m\" (UID: \"e27fe515-3c73-4be3-aaf4-34ed0f73992a\") " pod="openshift-machine-config-operator/machine-config-server-5j99m" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.835770 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ca96a556-37cd-4d51-bc17-66eb3547c482-cert\") pod \"ingress-canary-g4x9b\" (UID: \"ca96a556-37cd-4d51-bc17-66eb3547c482\") " pod="openshift-ingress-canary/ingress-canary-g4x9b" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.836458 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/07ecbb8f-d2fb-4933-a4b1-f411717ee5f3-metrics-tls\") pod \"dns-default-dflg7\" (UID: \"07ecbb8f-d2fb-4933-a4b1-f411717ee5f3\") " pod="openshift-dns/dns-default-dflg7" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.839378 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bwg9c" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.848728 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c96kd\" (UniqueName: \"kubernetes.io/projected/e27fe515-3c73-4be3-aaf4-34ed0f73992a-kube-api-access-c96kd\") pod \"machine-config-server-5j99m\" (UID: \"e27fe515-3c73-4be3-aaf4-34ed0f73992a\") " pod="openshift-machine-config-operator/machine-config-server-5j99m" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.868934 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cwlmk\" (UniqueName: \"kubernetes.io/projected/07ecbb8f-d2fb-4933-a4b1-f411717ee5f3-kube-api-access-cwlmk\") pod \"dns-default-dflg7\" (UID: \"07ecbb8f-d2fb-4933-a4b1-f411717ee5f3\") " pod="openshift-dns/dns-default-dflg7" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.871815 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-pnk89" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.889609 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-schj9\" (UniqueName: \"kubernetes.io/projected/ca96a556-37cd-4d51-bc17-66eb3547c482-kube-api-access-schj9\") pod \"ingress-canary-g4x9b\" (UID: \"ca96a556-37cd-4d51-bc17-66eb3547c482\") " pod="openshift-ingress-canary/ingress-canary-g4x9b" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.902294 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-5j99m" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.918930 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-g4x9b" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.925720 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.925741 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-dflg7" Jan 25 00:08:49 crc kubenswrapper[4985]: E0125 00:08:49.925994 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:50.425983863 +0000 UTC m=+140.457920136 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.936803 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9j92"] Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.960215 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-t7bhx" Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.961711 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7tgz4"] Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.968842 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-sffms"] Jan 25 00:08:49 crc kubenswrapper[4985]: I0125 00:08:49.980616 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-5z29b" Jan 25 00:08:50 crc kubenswrapper[4985]: I0125 00:08:50.008078 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-mpnhl" Jan 25 00:08:50 crc kubenswrapper[4985]: I0125 00:08:50.026468 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:50 crc kubenswrapper[4985]: E0125 00:08:50.026858 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:50.52684185 +0000 UTC m=+140.558778123 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:50 crc kubenswrapper[4985]: I0125 00:08:50.220329 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:50 crc kubenswrapper[4985]: E0125 00:08:50.220727 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:50.720711498 +0000 UTC m=+140.752647781 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:50 crc kubenswrapper[4985]: I0125 00:08:50.223118 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-764mr" Jan 25 00:08:50 crc kubenswrapper[4985]: W0125 00:08:50.279934 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod72c63de9_5d4f_4037_b70e_11ddf9a4904c.slice/crio-a4cb57236027e6189420c5cc82a9d64350ca5ab117bbc03a8194916e69bfb6c0 WatchSource:0}: Error finding container a4cb57236027e6189420c5cc82a9d64350ca5ab117bbc03a8194916e69bfb6c0: Status 404 returned error can't find the container with id a4cb57236027e6189420c5cc82a9d64350ca5ab117bbc03a8194916e69bfb6c0 Jan 25 00:08:50 crc kubenswrapper[4985]: I0125 00:08:50.325591 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:50 crc kubenswrapper[4985]: E0125 00:08:50.325867 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:50.825853528 +0000 UTC m=+140.857789801 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:50 crc kubenswrapper[4985]: I0125 00:08:50.477827 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:50 crc kubenswrapper[4985]: E0125 00:08:50.478160 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:50.978148991 +0000 UTC m=+141.010085264 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:50 crc kubenswrapper[4985]: W0125 00:08:50.557925 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9466dc5f_afcc_4586_bb92_cc23f5e64e77.slice/crio-296323f8d2e4e5b25c6b1a70a84d2e81437939b6011122ce0af4a88fba6a3309 WatchSource:0}: Error finding container 296323f8d2e4e5b25c6b1a70a84d2e81437939b6011122ce0af4a88fba6a3309: Status 404 returned error can't find the container with id 296323f8d2e4e5b25c6b1a70a84d2e81437939b6011122ce0af4a88fba6a3309 Jan 25 00:08:50 crc kubenswrapper[4985]: I0125 00:08:50.589212 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:50 crc kubenswrapper[4985]: E0125 00:08:50.589489 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:51.089476853 +0000 UTC m=+141.121413126 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:50 crc kubenswrapper[4985]: I0125 00:08:50.691150 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:50 crc kubenswrapper[4985]: E0125 00:08:50.691468 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:51.191438471 +0000 UTC m=+141.223374744 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:50 crc kubenswrapper[4985]: I0125 00:08:50.737732 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jdgg6" event={"ID":"a8c49802-43de-4e97-8067-4824c3312194","Type":"ContainerStarted","Data":"be3002ddf00c1c5b38b0fc463f1f2c3800c60db716c7d27131e28ff665136511"} Jan 25 00:08:50 crc kubenswrapper[4985]: I0125 00:08:50.755052 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29488320-65m92" event={"ID":"cdc7acca-bf54-44d2-986b-10ecfb1a0abd","Type":"ContainerStarted","Data":"2715347b0cad6a0cfee5589aefe5e0abd8991ac05ac51be5ccf6a125fed1f876"} Jan 25 00:08:50 crc kubenswrapper[4985]: I0125 00:08:50.784846 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w7k95" event={"ID":"cbf25816-bff6-42fc-8e43-513b490e830b","Type":"ContainerStarted","Data":"2d0299304629bf5b458a13773f921661d339bc0d5fd58a702f32b4616ee2c0d9"} Jan 25 00:08:50 crc kubenswrapper[4985]: I0125 00:08:50.786836 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5j4gc" event={"ID":"585479e7-a937-42f2-9802-2117e25c68c1","Type":"ContainerStarted","Data":"69ea84b5d83abce7c27751df77b823ea683e9c27b8df45f925377986a12ba57d"} Jan 25 00:08:50 crc kubenswrapper[4985]: I0125 00:08:50.794135 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:50 crc kubenswrapper[4985]: E0125 
00:08:50.794724 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:51.294696171 +0000 UTC m=+141.326632504 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:50 crc kubenswrapper[4985]: I0125 00:08:50.802323 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" event={"ID":"e1ea9185-aa51-4b82-98ed-b2f028d291b2","Type":"ContainerStarted","Data":"1d618d640e058c479e8de5ed06522c0b9820ef50210f727368cb3b22b268234c"} Jan 25 00:08:50 crc kubenswrapper[4985]: I0125 00:08:50.895843 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:50 crc kubenswrapper[4985]: E0125 00:08:50.897514 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:51.397500829 +0000 UTC m=+141.429437102 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:50 crc kubenswrapper[4985]: I0125 00:08:50.904363 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2dflz"] Jan 25 00:08:50 crc kubenswrapper[4985]: I0125 00:08:50.905570 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-8ph92" event={"ID":"25a97396-e69d-40d6-8734-95b5aaec338f","Type":"ContainerStarted","Data":"8f095aad2a4a9a29e83fadd05c53f3822ec9b5e31677e5c202d277fe8b64a3b4"} Jan 25 00:08:50 crc kubenswrapper[4985]: I0125 00:08:50.971958 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-7gxv5" event={"ID":"71626fcf-108c-42c4-95da-d634b73f587f","Type":"ContainerStarted","Data":"93b9af0779923c6cb0b202596dce048b4412ea8bf8dec3ddf9d10ce1b1743fb4"} Jan 25 00:08:50 crc kubenswrapper[4985]: I0125 00:08:50.975169 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-ctsj5"] Jan 25 00:08:50 crc kubenswrapper[4985]: I0125 00:08:50.982792 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jzhrb" event={"ID":"de88820c-7cff-4928-8f36-9ec785accadc","Type":"ContainerStarted","Data":"86ec730f9888329c1f2cc478b3bc789677085babd1cff165c8eb9f9315995793"} Jan 25 00:08:50 crc kubenswrapper[4985]: I0125 00:08:50.987960 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7tgz4" event={"ID":"72c63de9-5d4f-4037-b70e-11ddf9a4904c","Type":"ContainerStarted","Data":"a4cb57236027e6189420c5cc82a9d64350ca5ab117bbc03a8194916e69bfb6c0"} Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.002511 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-bnmp5" event={"ID":"e74fd6cc-f34d-41c4-8d01-0f556277340d","Type":"ContainerStarted","Data":"604b198c585fff27a56fd54243871593b46c103ed0e0e65c64d8eb731a745581"} Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.005309 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-sffms" event={"ID":"e0fc4ac9-4ec8-4651-bd15-c55bbf199299","Type":"ContainerStarted","Data":"524eee8f4c5e1b39c93c8e76f4cacb910cb175abbff259a3f5c3d123cbb6b252"} Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.010624 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:51 crc kubenswrapper[4985]: E0125 00:08:51.012020 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b 
nodeName:}" failed. No retries permitted until 2026-01-25 00:08:51.511998956 +0000 UTC m=+141.543935229 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.019529 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gp85q" event={"ID":"97676058-3567-4d0a-b8da-ad5890e39080","Type":"ContainerStarted","Data":"5ad3f5d03f92a0edc69ff08830ad42ba9528c814ea4b64ea602a0659c623a3e3"} Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.023040 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" event={"ID":"234cec4e-fc7e-4a34-b638-f1cc49fb2299","Type":"ContainerStarted","Data":"0e8f2782261f9d349ab9b076ec7ba6e4cc1b1621871372bbe096931f7e0941bd"} Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.031768 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.034356 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9k44n" event={"ID":"067ed730-bfcc-4d6e-84d4-28c57fa90343","Type":"ContainerStarted","Data":"60ece88c0050f5f8787cfa228dd9d07977f7f9b9e12fe20b0da115351dabbaa4"} Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.067164 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.067512 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dccnf" event={"ID":"3fa96974-2f91-4b24-b80d-4b221107adbe","Type":"ContainerStarted","Data":"b6d867ccc91fd70e40749594c65903740ffb6d9d042187012411d7550a1b0aed"} Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.079016 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" event={"ID":"4ee6bec9-a188-48bb-b49b-eeae08e55158","Type":"ContainerStarted","Data":"c53bec5dc8d2b9980297b234a814c13e7aaadbfe84c6a1500e82968a9d2e2c07"} Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.085625 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9j92" event={"ID":"9466dc5f-afcc-4586-bb92-cc23f5e64e77","Type":"ContainerStarted","Data":"296323f8d2e4e5b25c6b1a70a84d2e81437939b6011122ce0af4a88fba6a3309"} Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.116789 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:51 crc kubenswrapper[4985]: E0125 00:08:51.117654 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:51.617642529 +0000 UTC m=+141.649578802 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.141767 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-pruner-29488320-65m92" podStartSLOduration=122.141751875 podStartE2EDuration="2m2.141751875s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:51.138711934 +0000 UTC m=+141.170648207" watchObservedRunningTime="2026-01-25 00:08:51.141751875 +0000 UTC m=+141.173688148" Jan 25 00:08:51 crc kubenswrapper[4985]: W0125 00:08:51.153638 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod47fe0371_d4d4_40e2_905f_5e26db186cbe.slice/crio-866a4d31206f24950a999c12e88092fcf751b45f8accba75bb75ea79396bded7 WatchSource:0}: Error finding container 866a4d31206f24950a999c12e88092fcf751b45f8accba75bb75ea79396bded7: Status 404 returned error can't find the container with id 866a4d31206f24950a999c12e88092fcf751b45f8accba75bb75ea79396bded7 Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.160221 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-jzhrb" podStartSLOduration=122.160203801 podStartE2EDuration="2m2.160203801s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:51.153784592 +0000 UTC m=+141.185720865" watchObservedRunningTime="2026-01-25 00:08:51.160203801 +0000 UTC m=+141.192140074" Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.221169 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:51 crc kubenswrapper[4985]: E0125 00:08:51.222645 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:51.722627785 +0000 UTC m=+141.754564048 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.292074 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gp85q" podStartSLOduration=122.292055554 podStartE2EDuration="2m2.292055554s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:51.2903408 +0000 UTC m=+141.322277083" watchObservedRunningTime="2026-01-25 00:08:51.292055554 +0000 UTC m=+141.323991827" Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.324665 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:51 crc kubenswrapper[4985]: E0125 00:08:51.325038 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:51.825024613 +0000 UTC m=+141.856960886 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.341073 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-g229p"] Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.341258 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" podStartSLOduration=122.341240031 podStartE2EDuration="2m2.341240031s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:51.331771451 +0000 UTC m=+141.363707724" watchObservedRunningTime="2026-01-25 00:08:51.341240031 +0000 UTC m=+141.373176304" Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.348306 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-54vvw"] Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.368523 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-54sg5"] Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.383083 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-bfzz6"] Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.407857 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-29vvw"] Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.420573 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29488320-8l78v"] Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.428572 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:51 crc kubenswrapper[4985]: E0125 00:08:51.428963 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:51.928947481 +0000 UTC m=+141.960883754 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.529772 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:51 crc kubenswrapper[4985]: E0125 00:08:51.530087 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:52.030075666 +0000 UTC m=+142.062011939 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.552982 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-mp62x"] Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.615001 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-zp4dh"] Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.637555 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g9xw9"] Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.637855 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:51 crc kubenswrapper[4985]: E0125 00:08:51.638414 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:52.13839823 +0000 UTC m=+142.170334503 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.740045 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:51 crc kubenswrapper[4985]: E0125 00:08:51.740439 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:52.240425848 +0000 UTC m=+142.272362121 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.841440 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:51 crc kubenswrapper[4985]: E0125 00:08:51.841852 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:52.341835989 +0000 UTC m=+142.373772262 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:51 crc kubenswrapper[4985]: W0125 00:08:51.938983 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6f8695ce_bb8b_4288_bf4d_bd30db7eeb8c.slice/crio-cd80d53b6d942a67b45362707da2d46b458d1adcfd7da0826e796d26308ad8b7 WatchSource:0}: Error finding container cd80d53b6d942a67b45362707da2d46b458d1adcfd7da0826e796d26308ad8b7: Status 404 returned error can't find the container with id cd80d53b6d942a67b45362707da2d46b458d1adcfd7da0826e796d26308ad8b7 Jan 25 00:08:51 crc kubenswrapper[4985]: I0125 00:08:51.942930 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:51 crc kubenswrapper[4985]: E0125 00:08:51.943256 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:52.443244662 +0000 UTC m=+142.475180935 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.043562 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:52 crc kubenswrapper[4985]: E0125 00:08:52.043846 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:52.543831332 +0000 UTC m=+142.575767605 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.101701 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-g4x9b"] Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.115790 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.120007 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-8ph92" event={"ID":"25a97396-e69d-40d6-8734-95b5aaec338f","Type":"ContainerStarted","Data":"f6fdc1d7922356c56e84cc43fb954e62b08331fddb1c68696dbf37c573ede6a8"} Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.129274 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-zp4dh" event={"ID":"6f8695ce-bb8b-4288-bf4d-bd30db7eeb8c","Type":"ContainerStarted","Data":"cd80d53b6d942a67b45362707da2d46b458d1adcfd7da0826e796d26308ad8b7"} Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.131696 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-g229p" event={"ID":"b1f335db-7e31-44a9-b113-bb546349caa7","Type":"ContainerStarted","Data":"44d3e75395daca76ce0255f88bd7ba9121d29e0619e12c2f6a36511f51e4f79e"} Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.143642 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9k44n" event={"ID":"067ed730-bfcc-4d6e-84d4-28c57fa90343","Type":"ContainerStarted","Data":"31bad1f66db42772329318c3e3c92a0b81535cd0017005ccf3f22805df2b4605"} Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.147424 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:52 crc kubenswrapper[4985]: E0125 00:08:52.147725 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:52.647715239 +0000 UTC m=+142.679651512 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.168289 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-764mr"] Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.169414 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" event={"ID":"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b","Type":"ContainerStarted","Data":"c418e7413beb8d03f997acef58ced0bd0cd1b9812a2480d315ff03ad5c91655f"} Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.181535 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-5j99m" event={"ID":"e27fe515-3c73-4be3-aaf4-34ed0f73992a","Type":"ContainerStarted","Data":"3eafc4d2601c192a5e4f66188dc6631b6794867360af733270d592ce3cca4ec4"} Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.186118 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-9k44n" podStartSLOduration=124.18609264 podStartE2EDuration="2m4.18609264s" podCreationTimestamp="2026-01-25 00:06:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:52.184150199 +0000 UTC m=+142.216086472" watchObservedRunningTime="2026-01-25 00:08:52.18609264 +0000 UTC m=+142.218028913" Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.198364 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" event={"ID":"b56b49d8-11f3-49bc-bad7-d24bd00f0589","Type":"ContainerStarted","Data":"3c3bee61674b045d1f472fcdb7503929c8ccd272aa6cdf64e986959054fddb28"} Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.210289 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bfzz6" event={"ID":"b82d04cc-00d3-43dc-8317-dacb594c8b61","Type":"ContainerStarted","Data":"117e1552a03218e6fc3467bebf6998551e74d1f69f1e1d6eb3aa248e55070486"} Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.217314 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mp62x" event={"ID":"d46a176e-d2cd-41cc-8420-37762bc47cd3","Type":"ContainerStarted","Data":"4ba7801427fe4c58447324c2933876f09baf8d081343ef187c698a36ff1f8f4f"} Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.231800 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-gp85q" event={"ID":"97676058-3567-4d0a-b8da-ad5890e39080","Type":"ContainerStarted","Data":"3c5c1f086b97207c572d4bcf8c85517af20ee0112314d532b54100a3d464043f"} Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.248883 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.251661 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29488320-8l78v" event={"ID":"677a7eeb-960f-4771-bd2f-9fedef723ffd","Type":"ContainerStarted","Data":"9c33d93fa12e596472a04fd501667841b5cf352166bb912b2c8743661317cf8a"} Jan 25 00:08:52 crc kubenswrapper[4985]: E0125 00:08:52.256240 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:52.756207877 +0000 UTC m=+142.788144150 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.308223 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:52 crc kubenswrapper[4985]: E0125 00:08:52.309877 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:52.809860992 +0000 UTC m=+142.841797265 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.341657 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2dflz" event={"ID":"47fe0371-d4d4-40e2-905f-5e26db186cbe","Type":"ContainerStarted","Data":"866a4d31206f24950a999c12e88092fcf751b45f8accba75bb75ea79396bded7"} Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.358426 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" event={"ID":"7e68708c-6c3b-43d8-8005-1e144e5f8ad1","Type":"ContainerStarted","Data":"817342bb8ac2ef285c89b7a1e6d12f8ca5b218b906a719477c0518eab89b47de"} Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.368969 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-znxjv" event={"ID":"beb34140-c131-478f-94d6-c4b5433b58e9","Type":"ContainerStarted","Data":"144411f00b10c29b9c1def2a36dfd000fda2b891cfc07f25d8b2f9e89659664b"} Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.371617 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-54sg5" event={"ID":"ae20ca57-847f-4344-9718-aa179543b4ae","Type":"ContainerStarted","Data":"0021e1edb1bbb8607255657fc167100d196ce3919ac279f00187552aa75dee23"} Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.403011 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc"] Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.409600 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:52 crc kubenswrapper[4985]: E0125 00:08:52.409958 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:52.909943588 +0000 UTC m=+142.941879861 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.434099 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kz648"] Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.448728 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7tgz4" event={"ID":"72c63de9-5d4f-4037-b70e-11ddf9a4904c","Type":"ContainerStarted","Data":"28964c8cf988e4bb80252abbda2d7ee475cad53591450bab352334faa2d17b2b"} Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.483350 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-7tgz4" podStartSLOduration=123.483332971 podStartE2EDuration="2m3.483332971s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:52.481546804 +0000 UTC m=+142.513483097" watchObservedRunningTime="2026-01-25 00:08:52.483332971 +0000 UTC m=+142.515269244" Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.499007 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" event={"ID":"e1ea9185-aa51-4b82-98ed-b2f028d291b2","Type":"ContainerStarted","Data":"65b0124bb3c53e61e92adc4d43b522d25c014a919c59350d31c2cb18a405a8a5"} Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.499590 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.507243 4985 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-fwpcj container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" start-of-body= Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.507282 4985 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" podUID="e1ea9185-aa51-4b82-98ed-b2f028d291b2" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.39:8080/healthz\": dial tcp 10.217.0.39:8080: connect: connection refused" Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.511677 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5j4gc" event={"ID":"585479e7-a937-42f2-9802-2117e25c68c1","Type":"ContainerStarted","Data":"69562686ded4b1d9e5d0b4e20da4bef4da6de243b93a058577df126c205ddcba"} Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.513768 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:52 crc kubenswrapper[4985]: E0125 00:08:52.514116 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:53.014088941 +0000 UTC m=+143.046025214 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.549720 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" podStartSLOduration=123.54970277 podStartE2EDuration="2m3.54970277s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:52.54440857 +0000 UTC m=+142.576344853" watchObservedRunningTime="2026-01-25 00:08:52.54970277 +0000 UTC m=+142.581639043" Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.556285 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g9xw9" event={"ID":"16f872f6-c454-452f-adf9-bee0a76ebe2b","Type":"ContainerStarted","Data":"b580e1d65df1bb71ccd874e8e141dc192ac9032d4cbe35025f70f7f7334ff5b8"} Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.564377 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-7gxv5" event={"ID":"71626fcf-108c-42c4-95da-d634b73f587f","Type":"ContainerStarted","Data":"07155a5ed4d82e4d256ca0f99058bb5d923f68c47910a539201d44d307b19775"} Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.571261 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-bnmp5" event={"ID":"e74fd6cc-f34d-41c4-8d01-0f556277340d","Type":"ContainerStarted","Data":"2220bc6f017595cd4e5f2ebbf1d4c096cdf998692df6c64ceecfb165e024f608"} Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.577524 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-54vvw" event={"ID":"5dd173ed-ab63-4006-9e39-2f4abf301a8e","Type":"ContainerStarted","Data":"dc397c2408733cad5ce1c1a7504ad7ffb9d7a857bbfb10a7e443c382d3f843f6"} Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.578184 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-5z29b" event={"ID":"2bfd38dc-27e5-4906-a593-ea58e49340b8","Type":"ContainerStarted","Data":"3dcd6082c5a3707bf2e580dc41b2314db7e959b231cbc4598a3e3340c6eceb48"} Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.579035 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jdgg6" 
event={"ID":"a8c49802-43de-4e97-8067-4824c3312194","Type":"ContainerStarted","Data":"960596d8d465585839dabab4f4e37caac7136af32261c08b324cfbae5b38baa6"} Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.580645 4985 generic.go:334] "Generic (PLEG): container finished" podID="3fa96974-2f91-4b24-b80d-4b221107adbe" containerID="a9c6ed608deb61caecdf9a7e92333e70d4f8ee32b3effd39e0cee52c210e16bf" exitCode=0 Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.580686 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dccnf" event={"ID":"3fa96974-2f91-4b24-b80d-4b221107adbe","Type":"ContainerDied","Data":"a9c6ed608deb61caecdf9a7e92333e70d4f8ee32b3effd39e0cee52c210e16bf"} Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.581001 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dccnf" Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.603543 4985 generic.go:334] "Generic (PLEG): container finished" podID="4ee6bec9-a188-48bb-b49b-eeae08e55158" containerID="3f59ff416c187d2f8b3dfc26e51beca82ef7faf56497f399d4f52330b23ecb87" exitCode=0 Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.603984 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" event={"ID":"4ee6bec9-a188-48bb-b49b-eeae08e55158","Type":"ContainerDied","Data":"3f59ff416c187d2f8b3dfc26e51beca82ef7faf56497f399d4f52330b23ecb87"} Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.616671 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:52 crc kubenswrapper[4985]: E0125 00:08:52.617714 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:53.117699361 +0000 UTC m=+143.149635634 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.648887 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-5j4gc" podStartSLOduration=123.648866763 podStartE2EDuration="2m3.648866763s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:52.590514115 +0000 UTC m=+142.622450388" watchObservedRunningTime="2026-01-25 00:08:52.648866763 +0000 UTC m=+142.680803036" Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.690877 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-bnmp5" podStartSLOduration=123.690863129 podStartE2EDuration="2m3.690863129s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:52.648116332 +0000 UTC m=+142.680052615" watchObservedRunningTime="2026-01-25 00:08:52.690863129 +0000 UTC m=+142.722799402" Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.692210 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-7gxv5" podStartSLOduration=123.692204714 podStartE2EDuration="2m3.692204714s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:52.689824271 +0000 UTC m=+142.721760554" watchObservedRunningTime="2026-01-25 00:08:52.692204714 +0000 UTC m=+142.724140987" Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.718490 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.722655 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-dflg7"] Jan 25 00:08:52 crc kubenswrapper[4985]: E0125 00:08:52.724014 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:53.224000442 +0000 UTC m=+143.255936715 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.764507 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bwg9c"] Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.771231 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cjjtf"] Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.798518 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jdgg6" podStartSLOduration=123.798494615 podStartE2EDuration="2m3.798494615s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:52.755425951 +0000 UTC m=+142.787362224" watchObservedRunningTime="2026-01-25 00:08:52.798494615 +0000 UTC m=+142.830430888" Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.801769 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-pnk89"] Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.819530 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:52 crc kubenswrapper[4985]: E0125 00:08:52.819921 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:53.319906659 +0000 UTC m=+143.351842932 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.922150 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:52 crc kubenswrapper[4985]: E0125 00:08:52.923387 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:53.423375696 +0000 UTC m=+143.455311969 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.948638 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dccnf" podStartSLOduration=123.948625021 podStartE2EDuration="2m3.948625021s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:52.940471896 +0000 UTC m=+142.972408179" watchObservedRunningTime="2026-01-25 00:08:52.948625021 +0000 UTC m=+142.980561294" Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.951712 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-t7bhx"] Jan 25 00:08:52 crc kubenswrapper[4985]: I0125 00:08:52.956634 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-mpnhl"] Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.022735 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:53 crc kubenswrapper[4985]: E0125 00:08:53.023192 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:53.523174234 +0000 UTC m=+143.555110507 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.128547 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:53 crc kubenswrapper[4985]: E0125 00:08:53.144432 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:53.644412598 +0000 UTC m=+143.676348861 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.233651 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:53 crc kubenswrapper[4985]: E0125 00:08:53.233980 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:53.733965428 +0000 UTC m=+143.765901701 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:53 crc kubenswrapper[4985]: E0125 00:08:53.337280 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:53.83726872 +0000 UTC m=+143.869204993 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.337028 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.438613 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:53 crc kubenswrapper[4985]: E0125 00:08:53.439175 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:53.939159385 +0000 UTC m=+143.971095658 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.540257 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:53 crc kubenswrapper[4985]: E0125 00:08:53.540633 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:54.040618098 +0000 UTC m=+144.072554371 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.638237 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-bnmp5" event={"ID":"e74fd6cc-f34d-41c4-8d01-0f556277340d","Type":"ContainerStarted","Data":"e49cafde2e07528cecb8f2f9218723624f624577739dcc5178ffd0cbe10bab93"} Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.641299 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:53 crc kubenswrapper[4985]: E0125 00:08:53.641458 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:54.141433114 +0000 UTC m=+144.173369387 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.641637 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:53 crc kubenswrapper[4985]: E0125 00:08:53.641902 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:54.141891576 +0000 UTC m=+144.173827849 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.674074 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-sffms" event={"ID":"e0fc4ac9-4ec8-4651-bd15-c55bbf199299","Type":"ContainerStarted","Data":"0f810d3601d10ee10129417c852059a79764bdfb6fa1ab01d2d3d15f0c1a4b17"} Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.675031 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-sffms" Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.680880 4985 patch_prober.go:28] interesting pod/console-operator-58897d9998-sffms container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.23:8443/readyz\": dial tcp 10.217.0.23:8443: connect: connection refused" start-of-body= Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.680922 4985 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-sffms" podUID="e0fc4ac9-4ec8-4651-bd15-c55bbf199299" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.23:8443/readyz\": dial tcp 10.217.0.23:8443: connect: connection refused" Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.742619 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:53 crc kubenswrapper[4985]: E0125 00:08:53.742951 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:54.242923358 +0000 UTC m=+144.274859621 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.750585 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" event={"ID":"c6ea2c96-caaa-4e9f-816a-fe2f63dedd65","Type":"ContainerStarted","Data":"a6a6f427c4fd072105e15777859e1f9973ae0c4a8fc53935c105c643c50aa51e"} Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.759277 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-sffms" podStartSLOduration=124.759262498 podStartE2EDuration="2m4.759262498s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:53.695264032 +0000 UTC m=+143.727200315" watchObservedRunningTime="2026-01-25 00:08:53.759262498 +0000 UTC m=+143.791198771" Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.778858 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9j92" event={"ID":"9466dc5f-afcc-4586-bb92-cc23f5e64e77","Type":"ContainerStarted","Data":"5d919a6fa6bb15136e924c22239a7d0320072d551ce6024d3ed19b6b609bd7d4"} Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.795665 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-mpnhl" event={"ID":"1a3ef24c-1ed4-461f-ae53-9dbd9a150fa3","Type":"ContainerStarted","Data":"0ca9162a453f83a4eb00ffd4e43013581a8d8af0c24f00c648fea1af19f92198"} Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.799961 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-dflg7" event={"ID":"07ecbb8f-d2fb-4933-a4b1-f411717ee5f3","Type":"ContainerStarted","Data":"ad11dd1a75217205ad2506d9cc9e8932b1866706e8a7bfd37278afeabb4870a4"} Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.829435 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-n9j92" podStartSLOduration=124.829418977 podStartE2EDuration="2m4.829418977s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:53.828712808 +0000 UTC m=+143.860649081" watchObservedRunningTime="2026-01-25 00:08:53.829418977 +0000 UTC m=+143.861355240" Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.845795 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 
00:08:53.846346 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-54vvw" event={"ID":"5dd173ed-ab63-4006-9e39-2f4abf301a8e","Type":"ContainerStarted","Data":"1eb84789a0b1d58983a8966bdbce1807f1b2d5900c1eecfa0815eaf1b360b8d7"} Jan 25 00:08:53 crc kubenswrapper[4985]: E0125 00:08:53.847696 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:54.347685208 +0000 UTC m=+144.379621481 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.879285 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-g229p" podStartSLOduration=124.87926157 podStartE2EDuration="2m4.87926157s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:53.873295963 +0000 UTC m=+143.905232246" watchObservedRunningTime="2026-01-25 00:08:53.87926157 +0000 UTC m=+143.911197843" Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.884893 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kz648" event={"ID":"5d260c75-2257-4b95-982c-630a20b9d157","Type":"ContainerStarted","Data":"3fa909051ece08c69be1f442895c90b32a6c56de6b5b664b734bfb432af84fc0"} Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.900188 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-t7bhx" event={"ID":"ec54eb4a-a089-4c2a-9049-00a412be5916","Type":"ContainerStarted","Data":"894dcf2728238b1c084f5155e971f166b5d4421e52514b41d0dab4d34f9d786d"} Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.901581 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-54vvw" podStartSLOduration=124.901566298 podStartE2EDuration="2m4.901566298s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:53.899984396 +0000 UTC m=+143.931920679" watchObservedRunningTime="2026-01-25 00:08:53.901566298 +0000 UTC m=+143.933502571" Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.947555 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:53 crc kubenswrapper[4985]: E0125 00:08:53.948771 4985 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:54.448755511 +0000 UTC m=+144.480691784 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.961753 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-764mr" event={"ID":"acd1a373-e926-421c-88a1-4f46fd6dcdb8","Type":"ContainerStarted","Data":"18ec52a3883134c8a6adeaea61641d2a83a007c7904a215bfd51814ce640b928"} Jan 25 00:08:53 crc kubenswrapper[4985]: I0125 00:08:53.986948 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bwg9c" event={"ID":"22db9bf4-8af9-460e-ae6b-4874fe32053b","Type":"ContainerStarted","Data":"26b54f2dafc2ec9a32366d749a85b940c2d2d179a1c6549193f35e52d988c943"} Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.000973 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-g4x9b" event={"ID":"ca96a556-37cd-4d51-bc17-66eb3547c482","Type":"ContainerStarted","Data":"05da99b15a4c44b71a98fbe0bafd10f0973ba831c401ad0401cb011aae0ebd32"} Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.049880 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:54 crc kubenswrapper[4985]: E0125 00:08:54.050248 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:54.550235935 +0000 UTC m=+144.582172208 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.071837 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-5z29b" event={"ID":"2bfd38dc-27e5-4906-a593-ea58e49340b8","Type":"ContainerStarted","Data":"819a49eab7ef8e814ef32832a115e2cd9f66968da26b91a9ca2820498ed2aa67"} Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.074325 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cjjtf" event={"ID":"226aaa66-f8e9-42a6-b938-34b14f322d48","Type":"ContainerStarted","Data":"f02bd65264ac9f90a30fcbab1a69312ec9fb22cfd6683470f98de1fc8952829c"} Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.075360 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-znxjv" event={"ID":"beb34140-c131-478f-94d6-c4b5433b58e9","Type":"ContainerStarted","Data":"35d019495702c9bcb01fe8b6248cc9fa21b0990dabf216c00df80c570bb0102d"} Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.098667 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-8ph92" event={"ID":"25a97396-e69d-40d6-8734-95b5aaec338f","Type":"ContainerStarted","Data":"3ba0bd5cfb7a2c4de4ad333eb70a03b4b085696bab0b53ca76582b6af2d5b1ff"} Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.113481 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-54sg5" event={"ID":"ae20ca57-847f-4344-9718-aa179543b4ae","Type":"ContainerStarted","Data":"5e77ca9afd9da19267aca3ab6e2bb17bde6ec1c0a3698408545650c1ede12020"} Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.114460 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-54sg5" Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.127341 4985 patch_prober.go:28] interesting pod/downloads-7954f5f757-54sg5 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused" start-of-body= Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.127402 4985 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-54sg5" podUID="ae20ca57-847f-4344-9718-aa179543b4ae" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused" Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.129001 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-5z29b" podStartSLOduration=125.12898947 podStartE2EDuration="2m5.12898947s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:54.127933762 +0000 UTC m=+144.159870035" 
watchObservedRunningTime="2026-01-25 00:08:54.12898947 +0000 UTC m=+144.160925743" Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.129298 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-g4x9b" podStartSLOduration=9.129291898 podStartE2EDuration="9.129291898s" podCreationTimestamp="2026-01-25 00:08:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:54.030132295 +0000 UTC m=+144.062068578" watchObservedRunningTime="2026-01-25 00:08:54.129291898 +0000 UTC m=+144.161228171" Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.134994 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bfzz6" event={"ID":"b82d04cc-00d3-43dc-8317-dacb594c8b61","Type":"ContainerStarted","Data":"4b7662a0a9651f73e61f7f31fad03324f9accbfa94d169488b4635e0cb487b40"} Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.166793 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:54 crc kubenswrapper[4985]: E0125 00:08:54.167085 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:54.667048172 +0000 UTC m=+144.698984445 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.167392 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:54 crc kubenswrapper[4985]: E0125 00:08:54.169077 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:54.669055705 +0000 UTC m=+144.700991978 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.191977 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-54sg5" podStartSLOduration=125.191937438 podStartE2EDuration="2m5.191937438s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:54.187530332 +0000 UTC m=+144.219466625" watchObservedRunningTime="2026-01-25 00:08:54.191937438 +0000 UTC m=+144.223873711" Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.202588 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-pnk89" event={"ID":"a0cac91e-216e-424e-b665-cf28717932b0","Type":"ContainerStarted","Data":"4c5298af07c9a059147b37b0a69e714534cfab546f55baa82a9a0f26587bac00"} Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.207667 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dccnf" event={"ID":"3fa96974-2f91-4b24-b80d-4b221107adbe","Type":"ContainerStarted","Data":"6b0dfd788326f984cdd906d7453b4969fbb455f99bb1b8f486548f29dec706d9"} Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.214389 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w7k95" event={"ID":"cbf25816-bff6-42fc-8e43-513b490e830b","Type":"ContainerStarted","Data":"b6c8fa36670f3a281af669a9d8f7d5e34ac952894b2bea832b943846af13247a"} Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.223222 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-8ph92" podStartSLOduration=125.223204282 podStartE2EDuration="2m5.223204282s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:54.222633897 +0000 UTC m=+144.254570180" watchObservedRunningTime="2026-01-25 00:08:54.223204282 +0000 UTC m=+144.255140565" Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.270023 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29488320-8l78v" event={"ID":"677a7eeb-960f-4771-bd2f-9fedef723ffd","Type":"ContainerStarted","Data":"0e28419ef1322ce2013cd0a9571a6561de255ab41ba17ed38540c86fd3fadae5"} Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.276688 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:54 crc kubenswrapper[4985]: E0125 00:08:54.277858 4985 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:54.777841721 +0000 UTC m=+144.809777994 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.291468 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29488320-8l78v" podStartSLOduration=125.29145071 podStartE2EDuration="2m5.29145071s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:54.291215854 +0000 UTC m=+144.323152127" watchObservedRunningTime="2026-01-25 00:08:54.29145071 +0000 UTC m=+144.323386983" Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.321035 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2dflz" event={"ID":"47fe0371-d4d4-40e2-905f-5e26db186cbe","Type":"ContainerStarted","Data":"2103456bf24c2b79559b6028429be8f7a9cfb1dc4ecf4464cac4c3673cb2d571"} Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.321178 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2dflz" Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.325660 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.354967 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2dflz" Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.360875 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-2dflz" podStartSLOduration=125.360862589 podStartE2EDuration="2m5.360862589s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:54.360396576 +0000 UTC m=+144.392332859" watchObservedRunningTime="2026-01-25 00:08:54.360862589 +0000 UTC m=+144.392798862" Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.382869 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:54 crc kubenswrapper[4985]: E0125 00:08:54.384155 4985 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:54.884136862 +0000 UTC m=+144.916073185 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.484224 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:54 crc kubenswrapper[4985]: E0125 00:08:54.484390 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:54.984367923 +0000 UTC m=+145.016304196 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.484877 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:54 crc kubenswrapper[4985]: E0125 00:08:54.488669 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:54.988656355 +0000 UTC m=+145.020592628 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.586197 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:54 crc kubenswrapper[4985]: E0125 00:08:54.586867 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:55.086531524 +0000 UTC m=+145.118467797 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.587455 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:54 crc kubenswrapper[4985]: E0125 00:08:54.587770 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:55.087761407 +0000 UTC m=+145.119697680 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.690210 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:54 crc kubenswrapper[4985]: E0125 00:08:54.690389 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:55.190343389 +0000 UTC m=+145.222279662 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.690655 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:54 crc kubenswrapper[4985]: E0125 00:08:54.690935 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:55.190926024 +0000 UTC m=+145.222862367 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.825720 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:54 crc kubenswrapper[4985]: E0125 00:08:54.826275 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:55.326260421 +0000 UTC m=+145.358196694 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:54 crc kubenswrapper[4985]: I0125 00:08:54.930032 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:54 crc kubenswrapper[4985]: E0125 00:08:54.930400 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:55.430388414 +0000 UTC m=+145.462324687 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.004183 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-5z29b" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.012147 4985 patch_prober.go:28] interesting pod/router-default-5444994796-5z29b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 25 00:08:55 crc kubenswrapper[4985]: [-]has-synced failed: reason withheld Jan 25 00:08:55 crc kubenswrapper[4985]: [+]process-running ok Jan 25 00:08:55 crc kubenswrapper[4985]: healthz check failed Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.012201 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5z29b" podUID="2bfd38dc-27e5-4906-a593-ea58e49340b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.137341 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:55 crc kubenswrapper[4985]: E0125 00:08:55.137821 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:55.637789989 +0000 UTC m=+145.669726262 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.268946 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:55 crc kubenswrapper[4985]: E0125 00:08:55.269243 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:55.769232602 +0000 UTC m=+145.801168875 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.357280 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bfzz6" event={"ID":"b82d04cc-00d3-43dc-8317-dacb594c8b61","Type":"ContainerStarted","Data":"1dafee37f735941fb484bc7a3db6891f26d75a8647c19af941f7828f2b7f7b08"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.361634 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w7k95" event={"ID":"cbf25816-bff6-42fc-8e43-513b490e830b","Type":"ContainerStarted","Data":"fd1b23fb4590c7cb030689101df9921047bff57f47cf22be7455ba7b1afd3c22"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.363679 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cjjtf" event={"ID":"226aaa66-f8e9-42a6-b938-34b14f322d48","Type":"ContainerStarted","Data":"8826e26a26519db86156382ccba430ac2a1380b82a7173b50dc50ef8135542f0"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.363705 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cjjtf" event={"ID":"226aaa66-f8e9-42a6-b938-34b14f322d48","Type":"ContainerStarted","Data":"ab2df89513053e0537b146bdf0c0a2a4e1b8a3c7258cffccb4b210397eee1c1f"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.364018 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cjjtf" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.364913 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-t7bhx" event={"ID":"ec54eb4a-a089-4c2a-9049-00a412be5916","Type":"ContainerStarted","Data":"9f36a0209833888d17a747c6cdb3b5f1f83bd43b03b789021242b9f9dee6a0a2"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.366421 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" event={"ID":"4ee6bec9-a188-48bb-b49b-eeae08e55158","Type":"ContainerStarted","Data":"9439977dac2ce7752f84eea5aab6b17b468318d16cf30616f5f366068ab9f623"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.367561 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-g4x9b" event={"ID":"ca96a556-37cd-4d51-bc17-66eb3547c482","Type":"ContainerStarted","Data":"b373f3ee413968b56ba8a7f1dd25acaa3d994365505d1e089fc85187d9f9c040"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.369292 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:55 crc kubenswrapper[4985]: E0125 00:08:55.370500 4985 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:55.870485599 +0000 UTC m=+145.902421862 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.372713 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" event={"ID":"c6ea2c96-caaa-4e9f-816a-fe2f63dedd65","Type":"ContainerStarted","Data":"df3ea0dc4da8b50dcb1f77120ef92b796ef5e39cf1346b1ed775d8ae41c74e88"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.372987 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.376873 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-znxjv" event={"ID":"beb34140-c131-478f-94d6-c4b5433b58e9","Type":"ContainerStarted","Data":"b9855bdbe48d3b096b121f768e8c701429f29cb8e827680035a4b570141cbbdc"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.379867 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-bfzz6" podStartSLOduration=126.379857446 podStartE2EDuration="2m6.379857446s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:55.378883331 +0000 UTC m=+145.410819604" watchObservedRunningTime="2026-01-25 00:08:55.379857446 +0000 UTC m=+145.411793719" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.385127 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-77pvn"] Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.386004 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-77pvn" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.394406 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" event={"ID":"b56b49d8-11f3-49bc-bad7-d24bd00f0589","Type":"ContainerStarted","Data":"1ea99e36fe7082057407918d1ec2c266b34217841f57aeee43dfbe5b4b3445c3"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.395564 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.397418 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mp62x" event={"ID":"d46a176e-d2cd-41cc-8420-37762bc47cd3","Type":"ContainerStarted","Data":"8a815f1bbae333416a2d10ed7455ae16755d33f05e68c4ec8ce37e0c74afb05e"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.397658 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mp62x" event={"ID":"d46a176e-d2cd-41cc-8420-37762bc47cd3","Type":"ContainerStarted","Data":"699dd34e8f4305974b5cbcc502a880e2f0fde0bb0914519831cb4bd420165116"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.407406 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-5j99m" event={"ID":"e27fe515-3c73-4be3-aaf4-34ed0f73992a","Type":"ContainerStarted","Data":"974f5172e0b3dd80df86bbe007d5b4c1fb35bd45e50a8e730a210f28d92f7297"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.418057 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-77pvn"] Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.418516 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-mpnhl" event={"ID":"1a3ef24c-1ed4-461f-ae53-9dbd9a150fa3","Type":"ContainerStarted","Data":"5b2d6ba975ae04bc8dc505c48e3207631878fe63828651800ce46e01904aeb34"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.420340 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-pnk89" event={"ID":"a0cac91e-216e-424e-b665-cf28717932b0","Type":"ContainerStarted","Data":"4b010f166a6c42a2fc29e3cd2d2408e5047c36e33bf1c83a7ea49557d7bd5984"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.422053 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" event={"ID":"7e68708c-6c3b-43d8-8005-1e144e5f8ad1","Type":"ContainerStarted","Data":"31f6b415083622632b4a278fc4a2c97a699ae2d3af04635ad1ebe492150aa701"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.422292 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.425141 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-dflg7" event={"ID":"07ecbb8f-d2fb-4933-a4b1-f411717ee5f3","Type":"ContainerStarted","Data":"e7919743666c62a1e18d561ceba2f5847bcb0e774ac7ec8694ff8ff82233f0ff"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.425186 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-dflg7" 
event={"ID":"07ecbb8f-d2fb-4933-a4b1-f411717ee5f3","Type":"ContainerStarted","Data":"a39fa345ed657f07af43ee51378b9bb2b613c1953285eb4644616ef0c3c99079"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.425632 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-dflg7" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.427244 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-zp4dh" event={"ID":"6f8695ce-bb8b-4288-bf4d-bd30db7eeb8c","Type":"ContainerStarted","Data":"995d9ae7c65ddcbed59e81816d72f5dabe0f711f8398f7ce128d2bcc7868b07a"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.427270 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-zp4dh" event={"ID":"6f8695ce-bb8b-4288-bf4d-bd30db7eeb8c","Type":"ContainerStarted","Data":"4f03b4fb3629cc26aaf199b618a86682d59cab68d2bcfced0719b07c2dfe3dd5"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.428203 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-g229p" event={"ID":"b1f335db-7e31-44a9-b113-bb546349caa7","Type":"ContainerStarted","Data":"d0ee092a3282b9ec57fde0cfc0c7c1330b9a3cd737fc1b0b58d29e4d7ff564ed"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.432210 4985 generic.go:334] "Generic (PLEG): container finished" podID="4c2471c6-f9fd-439f-a0cf-1e4e166ed30b" containerID="4354ae6a9b58406ef7ee5e7b2e6f0196ad7179c8e5f394ea412d98a2a1b35a01" exitCode=0 Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.432323 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" event={"ID":"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b","Type":"ContainerDied","Data":"4354ae6a9b58406ef7ee5e7b2e6f0196ad7179c8e5f394ea412d98a2a1b35a01"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.477033 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:55 crc kubenswrapper[4985]: E0125 00:08:55.477554 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:55.97754169 +0000 UTC m=+146.009477953 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.483849 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kz648" event={"ID":"5d260c75-2257-4b95-982c-630a20b9d157","Type":"ContainerStarted","Data":"834dd6302c318c3fe465aae329485aef90494d1eeeab3b8162933167233444f3"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.485867 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g9xw9" event={"ID":"16f872f6-c454-452f-adf9-bee0a76ebe2b","Type":"ContainerStarted","Data":"afcdb494f9c0d760a0a4758cffe5c9aacdaaac090360d48c2f32320c4891a386"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.486608 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g9xw9" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.488740 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nsrmt"] Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.494734 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g9xw9" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.494768 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-764mr" event={"ID":"acd1a373-e926-421c-88a1-4f46fd6dcdb8","Type":"ContainerStarted","Data":"2b89c173d0acbc64500d96b8e8c42ef47d97290be7fea43b8758e9512cbb9ba6"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.494786 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-764mr" event={"ID":"acd1a373-e926-421c-88a1-4f46fd6dcdb8","Type":"ContainerStarted","Data":"fa4aae28a9a3a832fcaef942deaa15009daf02c139741179c0478035c9cf44b3"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.516197 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nsrmt" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.518201 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cjjtf" podStartSLOduration=126.518166351 podStartE2EDuration="2m6.518166351s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:55.475579559 +0000 UTC m=+145.507515842" watchObservedRunningTime="2026-01-25 00:08:55.518166351 +0000 UTC m=+145.550102624" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.527182 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.530072 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nsrmt"] Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.540631 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bwg9c" event={"ID":"22db9bf4-8af9-460e-ae6b-4874fe32053b","Type":"ContainerStarted","Data":"b2637935876a6f0bb031f84c7ec4c0d68c842e296206f7fa3102dba6af0d548f"} Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.540675 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bwg9c" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.540794 4985 patch_prober.go:28] interesting pod/downloads-7954f5f757-54sg5 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused" start-of-body= Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.540824 4985 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-54sg5" podUID="ae20ca57-847f-4344-9718-aa179543b4ae" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.553613 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-dccnf" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.560680 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" podStartSLOduration=126.56066999 podStartE2EDuration="2m6.56066999s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:55.558992616 +0000 UTC m=+145.590928889" watchObservedRunningTime="2026-01-25 00:08:55.56066999 +0000 UTC m=+145.592606263" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.585129 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.585480 
4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xzwjt\" (UniqueName: \"kubernetes.io/projected/841f3be9-8a92-4e9e-af89-ddf60ffc736e-kube-api-access-xzwjt\") pod \"certified-operators-nsrmt\" (UID: \"841f3be9-8a92-4e9e-af89-ddf60ffc736e\") " pod="openshift-marketplace/certified-operators-nsrmt" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.585616 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/841f3be9-8a92-4e9e-af89-ddf60ffc736e-catalog-content\") pod \"certified-operators-nsrmt\" (UID: \"841f3be9-8a92-4e9e-af89-ddf60ffc736e\") " pod="openshift-marketplace/certified-operators-nsrmt" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.585694 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tm5cl\" (UniqueName: \"kubernetes.io/projected/badea0b3-377c-4171-931a-2fc2a9a07922-kube-api-access-tm5cl\") pod \"community-operators-77pvn\" (UID: \"badea0b3-377c-4171-931a-2fc2a9a07922\") " pod="openshift-marketplace/community-operators-77pvn" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.586124 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/841f3be9-8a92-4e9e-af89-ddf60ffc736e-utilities\") pod \"certified-operators-nsrmt\" (UID: \"841f3be9-8a92-4e9e-af89-ddf60ffc736e\") " pod="openshift-marketplace/certified-operators-nsrmt" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.586233 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/badea0b3-377c-4171-931a-2fc2a9a07922-catalog-content\") pod \"community-operators-77pvn\" (UID: \"badea0b3-377c-4171-931a-2fc2a9a07922\") " pod="openshift-marketplace/community-operators-77pvn" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.586280 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/badea0b3-377c-4171-931a-2fc2a9a07922-utilities\") pod \"community-operators-77pvn\" (UID: \"badea0b3-377c-4171-931a-2fc2a9a07922\") " pod="openshift-marketplace/community-operators-77pvn" Jan 25 00:08:55 crc kubenswrapper[4985]: E0125 00:08:55.586365 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:56.086350607 +0000 UTC m=+146.118286870 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.595996 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-w7k95" podStartSLOduration=126.595981531 podStartE2EDuration="2m6.595981531s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:55.590933478 +0000 UTC m=+145.622869751" watchObservedRunningTime="2026-01-25 00:08:55.595981531 +0000 UTC m=+145.627917804" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.619741 4985 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.623797 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-t7bhx" podStartSLOduration=127.623775892 podStartE2EDuration="2m7.623775892s" podCreationTimestamp="2026-01-25 00:06:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:55.62253169 +0000 UTC m=+145.654467973" watchObservedRunningTime="2026-01-25 00:08:55.623775892 +0000 UTC m=+145.655712165" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.651067 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-znxjv" podStartSLOduration=127.651049231 podStartE2EDuration="2m7.651049231s" podCreationTimestamp="2026-01-25 00:06:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:55.649351627 +0000 UTC m=+145.681287900" watchObservedRunningTime="2026-01-25 00:08:55.651049231 +0000 UTC m=+145.682985504" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.663449 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.692868 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xzwjt\" (UniqueName: \"kubernetes.io/projected/841f3be9-8a92-4e9e-af89-ddf60ffc736e-kube-api-access-xzwjt\") pod \"certified-operators-nsrmt\" (UID: \"841f3be9-8a92-4e9e-af89-ddf60ffc736e\") " pod="openshift-marketplace/certified-operators-nsrmt" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.692926 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.692954 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/841f3be9-8a92-4e9e-af89-ddf60ffc736e-catalog-content\") pod \"certified-operators-nsrmt\" (UID: \"841f3be9-8a92-4e9e-af89-ddf60ffc736e\") " pod="openshift-marketplace/certified-operators-nsrmt" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.692982 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tm5cl\" (UniqueName: \"kubernetes.io/projected/badea0b3-377c-4171-931a-2fc2a9a07922-kube-api-access-tm5cl\") pod \"community-operators-77pvn\" (UID: \"badea0b3-377c-4171-931a-2fc2a9a07922\") " pod="openshift-marketplace/community-operators-77pvn" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.693062 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/841f3be9-8a92-4e9e-af89-ddf60ffc736e-utilities\") pod \"certified-operators-nsrmt\" (UID: \"841f3be9-8a92-4e9e-af89-ddf60ffc736e\") " pod="openshift-marketplace/certified-operators-nsrmt" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.693084 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/badea0b3-377c-4171-931a-2fc2a9a07922-catalog-content\") pod \"community-operators-77pvn\" (UID: \"badea0b3-377c-4171-931a-2fc2a9a07922\") " pod="openshift-marketplace/community-operators-77pvn" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.693124 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/badea0b3-377c-4171-931a-2fc2a9a07922-utilities\") pod \"community-operators-77pvn\" (UID: \"badea0b3-377c-4171-931a-2fc2a9a07922\") " pod="openshift-marketplace/community-operators-77pvn" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.693580 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/841f3be9-8a92-4e9e-af89-ddf60ffc736e-catalog-content\") pod \"certified-operators-nsrmt\" (UID: \"841f3be9-8a92-4e9e-af89-ddf60ffc736e\") " pod="openshift-marketplace/certified-operators-nsrmt" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.693624 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/badea0b3-377c-4171-931a-2fc2a9a07922-utilities\") pod \"community-operators-77pvn\" (UID: \"badea0b3-377c-4171-931a-2fc2a9a07922\") " pod="openshift-marketplace/community-operators-77pvn" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.693641 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/841f3be9-8a92-4e9e-af89-ddf60ffc736e-utilities\") pod \"certified-operators-nsrmt\" (UID: \"841f3be9-8a92-4e9e-af89-ddf60ffc736e\") " pod="openshift-marketplace/certified-operators-nsrmt" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.694284 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/badea0b3-377c-4171-931a-2fc2a9a07922-catalog-content\") pod \"community-operators-77pvn\" (UID: \"badea0b3-377c-4171-931a-2fc2a9a07922\") " 
pod="openshift-marketplace/community-operators-77pvn" Jan 25 00:08:55 crc kubenswrapper[4985]: E0125 00:08:55.694802 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:56.194792444 +0000 UTC m=+146.226728717 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.726183 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-pp7k8"] Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.727239 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pp7k8" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.727403 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" podStartSLOduration=126.727388732 podStartE2EDuration="2m6.727388732s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:55.710038145 +0000 UTC m=+145.741974428" watchObservedRunningTime="2026-01-25 00:08:55.727388732 +0000 UTC m=+145.759325005" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.751035 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tm5cl\" (UniqueName: \"kubernetes.io/projected/badea0b3-377c-4171-931a-2fc2a9a07922-kube-api-access-tm5cl\") pod \"community-operators-77pvn\" (UID: \"badea0b3-377c-4171-931a-2fc2a9a07922\") " pod="openshift-marketplace/community-operators-77pvn" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.756654 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xzwjt\" (UniqueName: \"kubernetes.io/projected/841f3be9-8a92-4e9e-af89-ddf60ffc736e-kube-api-access-xzwjt\") pod \"certified-operators-nsrmt\" (UID: \"841f3be9-8a92-4e9e-af89-ddf60ffc736e\") " pod="openshift-marketplace/certified-operators-nsrmt" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.773248 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-sffms" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.794292 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:55 crc kubenswrapper[4985]: E0125 00:08:55.794574 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2026-01-25 00:08:56.294551172 +0000 UTC m=+146.326487445 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.804536 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-kz648" podStartSLOduration=126.804521445 podStartE2EDuration="2m6.804521445s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:55.778914541 +0000 UTC m=+145.810850824" watchObservedRunningTime="2026-01-25 00:08:55.804521445 +0000 UTC m=+145.836457718" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.804935 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pp7k8"] Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.842026 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bwg9c" podStartSLOduration=126.842011662 podStartE2EDuration="2m6.842011662s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:55.841761716 +0000 UTC m=+145.873698009" watchObservedRunningTime="2026-01-25 00:08:55.842011662 +0000 UTC m=+145.873947935" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.859838 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nsrmt" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.881014 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-czq57"] Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.881902 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-czq57" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.898095 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fcaee3d-2838-4823-b0fd-f6285ebfe74c-utilities\") pod \"certified-operators-czq57\" (UID: \"5fcaee3d-2838-4823-b0fd-f6285ebfe74c\") " pod="openshift-marketplace/certified-operators-czq57" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.898220 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fr4hm\" (UniqueName: \"kubernetes.io/projected/4daece71-11c1-4ef2-8cae-ff8e392d1abe-kube-api-access-fr4hm\") pod \"community-operators-pp7k8\" (UID: \"4daece71-11c1-4ef2-8cae-ff8e392d1abe\") " pod="openshift-marketplace/community-operators-pp7k8" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.898252 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4daece71-11c1-4ef2-8cae-ff8e392d1abe-catalog-content\") pod \"community-operators-pp7k8\" (UID: \"4daece71-11c1-4ef2-8cae-ff8e392d1abe\") " pod="openshift-marketplace/community-operators-pp7k8" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.898294 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g8s22\" (UniqueName: \"kubernetes.io/projected/5fcaee3d-2838-4823-b0fd-f6285ebfe74c-kube-api-access-g8s22\") pod \"certified-operators-czq57\" (UID: \"5fcaee3d-2838-4823-b0fd-f6285ebfe74c\") " pod="openshift-marketplace/certified-operators-czq57" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.898344 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fcaee3d-2838-4823-b0fd-f6285ebfe74c-catalog-content\") pod \"certified-operators-czq57\" (UID: \"5fcaee3d-2838-4823-b0fd-f6285ebfe74c\") " pod="openshift-marketplace/certified-operators-czq57" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.898360 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4daece71-11c1-4ef2-8cae-ff8e392d1abe-utilities\") pod \"community-operators-pp7k8\" (UID: \"4daece71-11c1-4ef2-8cae-ff8e392d1abe\") " pod="openshift-marketplace/community-operators-pp7k8" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.898387 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:55 crc kubenswrapper[4985]: E0125 00:08:55.898644 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:56.398633365 +0000 UTC m=+146.430569638 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.978217 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.980071 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-zp4dh" podStartSLOduration=126.98005856 podStartE2EDuration="2m6.98005856s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:55.97700117 +0000 UTC m=+146.008937443" watchObservedRunningTime="2026-01-25 00:08:55.98005856 +0000 UTC m=+146.011994833" Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.980982 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-czq57"] Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.987009 4985 patch_prober.go:28] interesting pod/router-default-5444994796-5z29b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 25 00:08:55 crc kubenswrapper[4985]: [-]has-synced failed: reason withheld Jan 25 00:08:55 crc kubenswrapper[4985]: [+]process-running ok Jan 25 00:08:55 crc kubenswrapper[4985]: healthz check failed Jan 25 00:08:55 crc kubenswrapper[4985]: I0125 00:08:55.987055 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5z29b" podUID="2bfd38dc-27e5-4906-a593-ea58e49340b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.012172 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:56 crc kubenswrapper[4985]: E0125 00:08:56.012477 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:56.512461023 +0000 UTC m=+146.544397296 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.012531 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.012559 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fcaee3d-2838-4823-b0fd-f6285ebfe74c-utilities\") pod \"certified-operators-czq57\" (UID: \"5fcaee3d-2838-4823-b0fd-f6285ebfe74c\") " pod="openshift-marketplace/certified-operators-czq57" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.012600 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fr4hm\" (UniqueName: \"kubernetes.io/projected/4daece71-11c1-4ef2-8cae-ff8e392d1abe-kube-api-access-fr4hm\") pod \"community-operators-pp7k8\" (UID: \"4daece71-11c1-4ef2-8cae-ff8e392d1abe\") " pod="openshift-marketplace/community-operators-pp7k8" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.012637 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4daece71-11c1-4ef2-8cae-ff8e392d1abe-catalog-content\") pod \"community-operators-pp7k8\" (UID: \"4daece71-11c1-4ef2-8cae-ff8e392d1abe\") " pod="openshift-marketplace/community-operators-pp7k8" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.012675 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g8s22\" (UniqueName: \"kubernetes.io/projected/5fcaee3d-2838-4823-b0fd-f6285ebfe74c-kube-api-access-g8s22\") pod \"certified-operators-czq57\" (UID: \"5fcaee3d-2838-4823-b0fd-f6285ebfe74c\") " pod="openshift-marketplace/certified-operators-czq57" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.012722 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fcaee3d-2838-4823-b0fd-f6285ebfe74c-catalog-content\") pod \"certified-operators-czq57\" (UID: \"5fcaee3d-2838-4823-b0fd-f6285ebfe74c\") " pod="openshift-marketplace/certified-operators-czq57" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.012741 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4daece71-11c1-4ef2-8cae-ff8e392d1abe-utilities\") pod \"community-operators-pp7k8\" (UID: \"4daece71-11c1-4ef2-8cae-ff8e392d1abe\") " pod="openshift-marketplace/community-operators-pp7k8" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.014214 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fcaee3d-2838-4823-b0fd-f6285ebfe74c-utilities\") pod 
\"certified-operators-czq57\" (UID: \"5fcaee3d-2838-4823-b0fd-f6285ebfe74c\") " pod="openshift-marketplace/certified-operators-czq57" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.014645 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4daece71-11c1-4ef2-8cae-ff8e392d1abe-catalog-content\") pod \"community-operators-pp7k8\" (UID: \"4daece71-11c1-4ef2-8cae-ff8e392d1abe\") " pod="openshift-marketplace/community-operators-pp7k8" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.014995 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fcaee3d-2838-4823-b0fd-f6285ebfe74c-catalog-content\") pod \"certified-operators-czq57\" (UID: \"5fcaee3d-2838-4823-b0fd-f6285ebfe74c\") " pod="openshift-marketplace/certified-operators-czq57" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.015324 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-77pvn" Jan 25 00:08:56 crc kubenswrapper[4985]: E0125 00:08:56.016044 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:56.516033208 +0000 UTC m=+146.547969481 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.026164 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4daece71-11c1-4ef2-8cae-ff8e392d1abe-utilities\") pod \"community-operators-pp7k8\" (UID: \"4daece71-11c1-4ef2-8cae-ff8e392d1abe\") " pod="openshift-marketplace/community-operators-pp7k8" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.053390 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g8s22\" (UniqueName: \"kubernetes.io/projected/5fcaee3d-2838-4823-b0fd-f6285ebfe74c-kube-api-access-g8s22\") pod \"certified-operators-czq57\" (UID: \"5fcaee3d-2838-4823-b0fd-f6285ebfe74c\") " pod="openshift-marketplace/certified-operators-czq57" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.076841 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fr4hm\" (UniqueName: \"kubernetes.io/projected/4daece71-11c1-4ef2-8cae-ff8e392d1abe-kube-api-access-fr4hm\") pod \"community-operators-pp7k8\" (UID: \"4daece71-11c1-4ef2-8cae-ff8e392d1abe\") " pod="openshift-marketplace/community-operators-pp7k8" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.088710 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pp7k8" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.113668 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:56 crc kubenswrapper[4985]: E0125 00:08:56.114019 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:56.614004539 +0000 UTC m=+146.645940812 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.162039 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-mp62x" podStartSLOduration=127.162018254 podStartE2EDuration="2m7.162018254s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:56.114349949 +0000 UTC m=+146.146286222" watchObservedRunningTime="2026-01-25 00:08:56.162018254 +0000 UTC m=+146.193954597" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.197643 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-g9xw9" podStartSLOduration=127.197622532 podStartE2EDuration="2m7.197622532s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:56.183967492 +0000 UTC m=+146.215903785" watchObservedRunningTime="2026-01-25 00:08:56.197622532 +0000 UTC m=+146.229558805" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.215577 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:56 crc kubenswrapper[4985]: E0125 00:08:56.216054 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:56.716042828 +0000 UTC m=+146.747979101 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.287998 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-czq57" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.288555 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-dflg7" podStartSLOduration=11.288531758 podStartE2EDuration="11.288531758s" podCreationTimestamp="2026-01-25 00:08:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:56.244465356 +0000 UTC m=+146.276401649" watchObservedRunningTime="2026-01-25 00:08:56.288531758 +0000 UTC m=+146.320468031" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.316595 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:56 crc kubenswrapper[4985]: E0125 00:08:56.316975 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:56.816958806 +0000 UTC m=+146.848895079 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.343800 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-764mr" podStartSLOduration=127.343780413 podStartE2EDuration="2m7.343780413s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:56.313226238 +0000 UTC m=+146.345162531" watchObservedRunningTime="2026-01-25 00:08:56.343780413 +0000 UTC m=+146.375716686" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.344469 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-pnk89" podStartSLOduration=127.344465011 podStartE2EDuration="2m7.344465011s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:56.341459712 +0000 UTC m=+146.373395985" watchObservedRunningTime="2026-01-25 00:08:56.344465011 +0000 UTC m=+146.376401284" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.418617 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:56 crc kubenswrapper[4985]: E0125 00:08:56.419246 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-25 00:08:56.919233841 +0000 UTC m=+146.951170104 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-l8stl" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.461912 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" podStartSLOduration=128.461894695 podStartE2EDuration="2m8.461894695s" podCreationTimestamp="2026-01-25 00:06:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:56.433630251 +0000 UTC m=+146.465566524" watchObservedRunningTime="2026-01-25 00:08:56.461894695 +0000 UTC m=+146.493830968" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.500678 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-5j99m" podStartSLOduration=11.500661017 podStartE2EDuration="11.500661017s" podCreationTimestamp="2026-01-25 00:08:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:56.477417934 +0000 UTC m=+146.509354207" watchObservedRunningTime="2026-01-25 00:08:56.500661017 +0000 UTC m=+146.532597290" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.520555 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:56 crc kubenswrapper[4985]: E0125 00:08:56.521083 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-25 00:08:57.021069264 +0000 UTC m=+147.053005537 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.543246 4985 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-bwg9c container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.42:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.543310 4985 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bwg9c" podUID="22db9bf4-8af9-460e-ae6b-4874fe32053b" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.42:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.579755 4985 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-01-25T00:08:55.619771837Z","Handler":null,"Name":""} Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.597160 4985 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.597208 4985 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.622099 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.624629 4985 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.624750 4985 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.656809 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" event={"ID":"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b","Type":"ContainerStarted","Data":"f31c468e5b5b334d32437f3746f4dcea3c821a432b251f8b0f57e009a6916630"} Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.682148 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-mpnhl" event={"ID":"1a3ef24c-1ed4-461f-ae53-9dbd9a150fa3","Type":"ContainerStarted","Data":"86c85c2ba6396d0eb0c2f1d0c16f0eda7dd994b017aa99d30da6eb5c4f3f8f6e"} Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.700382 4985 csr.go:261] certificate signing request csr-ws7dx is approved, waiting to be issued Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.701657 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" event={"ID":"b56b49d8-11f3-49bc-bad7-d24bd00f0589","Type":"ContainerStarted","Data":"d66f60a3e5b0646007ab1a7d0cd16ffd0b4108e87112140e38a56c461340e165"} Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.701683 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" event={"ID":"b56b49d8-11f3-49bc-bad7-d24bd00f0589","Type":"ContainerStarted","Data":"1e0b20b0fdf787cc6f32ca43063991808f050d61a88cb79b691690f2dbf36fd4"} Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.709998 4985 csr.go:257] certificate signing request csr-ws7dx is issued Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.717639 4985 patch_prober.go:28] interesting pod/downloads-7954f5f757-54sg5 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused" start-of-body= Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.717687 4985 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-54sg5" podUID="ae20ca57-847f-4344-9718-aa179543b4ae" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.727795 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-mpnhl" podStartSLOduration=127.727777611 podStartE2EDuration="2m7.727777611s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:56.726363593 +0000 UTC m=+146.758299876" watchObservedRunningTime="2026-01-25 00:08:56.727777611 +0000 UTC m=+146.759713884" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.762611 4985 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-l8stl\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.768748 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-bwg9c" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.778265 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nsrmt"] Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.790222 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-4vgrr" podStartSLOduration=11.790201685 podStartE2EDuration="11.790201685s" podCreationTimestamp="2026-01-25 00:08:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:56.776698829 +0000 UTC m=+146.808635102" watchObservedRunningTime="2026-01-25 00:08:56.790201685 +0000 UTC m=+146.822137958" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.826236 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.897255 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.948739 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.992807 4985 patch_prober.go:28] interesting pod/router-default-5444994796-5z29b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 25 00:08:56 crc kubenswrapper[4985]: [-]has-synced failed: reason withheld Jan 25 00:08:56 crc kubenswrapper[4985]: [+]process-running ok Jan 25 00:08:56 crc kubenswrapper[4985]: healthz check failed Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.992869 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5z29b" podUID="2bfd38dc-27e5-4906-a593-ea58e49340b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 25 00:08:56 crc kubenswrapper[4985]: I0125 00:08:56.997590 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-czq57"] Jan 25 00:08:57 crc kubenswrapper[4985]: W0125 00:08:57.013662 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fcaee3d_2838_4823_b0fd_f6285ebfe74c.slice/crio-83eb0e9ae8dde25bb90f3a6872d2544296956ae2bcf676cd9e1bac6a6dc5f340 WatchSource:0}: Error finding container 83eb0e9ae8dde25bb90f3a6872d2544296956ae2bcf676cd9e1bac6a6dc5f340: Status 404 returned error can't find the container with id 83eb0e9ae8dde25bb90f3a6872d2544296956ae2bcf676cd9e1bac6a6dc5f340 Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.065069 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-77pvn"] Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.104671 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-pp7k8"] Jan 25 00:08:57 crc kubenswrapper[4985]: W0125 00:08:57.191491 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4daece71_11c1_4ef2_8cae_ff8e392d1abe.slice/crio-fba643509d226555b244e2ed690fc914b78473fa674d996ffa37ee6af2b1974d WatchSource:0}: Error finding container fba643509d226555b244e2ed690fc914b78473fa674d996ffa37ee6af2b1974d: Status 404 returned error can't find the container with id fba643509d226555b244e2ed690fc914b78473fa674d996ffa37ee6af2b1974d Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.335757 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l8stl"] Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.360162 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.360233 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 
00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.360274 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.360295 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.362310 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:08:57 crc kubenswrapper[4985]: W0125 00:08:57.366750 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode3a56e76_0d21_4576_91ec_87099bd8f5e9.slice/crio-88ade9b0be0da3fb263b6acd2bfc28adac13ccc5aeb7954b5d52719606cab2b2 WatchSource:0}: Error finding container 88ade9b0be0da3fb263b6acd2bfc28adac13ccc5aeb7954b5d52719606cab2b2: Status 404 returned error can't find the container with id 88ade9b0be0da3fb263b6acd2bfc28adac13ccc5aeb7954b5d52719606cab2b2 Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.367987 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.369732 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.370303 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.445780 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rl7bj"] Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.447116 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rl7bj" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.449306 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.459157 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rl7bj"] Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.500552 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.513478 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.525734 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.567726 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a26fe5e-9560-455a-a98e-6185e89ee607-catalog-content\") pod \"redhat-marketplace-rl7bj\" (UID: \"7a26fe5e-9560-455a-a98e-6185e89ee607\") " pod="openshift-marketplace/redhat-marketplace-rl7bj" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.568207 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a26fe5e-9560-455a-a98e-6185e89ee607-utilities\") pod \"redhat-marketplace-rl7bj\" (UID: \"7a26fe5e-9560-455a-a98e-6185e89ee607\") " pod="openshift-marketplace/redhat-marketplace-rl7bj" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.568249 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nz6sd\" (UniqueName: \"kubernetes.io/projected/7a26fe5e-9560-455a-a98e-6185e89ee607-kube-api-access-nz6sd\") pod \"redhat-marketplace-rl7bj\" (UID: \"7a26fe5e-9560-455a-a98e-6185e89ee607\") " pod="openshift-marketplace/redhat-marketplace-rl7bj" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.668986 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a26fe5e-9560-455a-a98e-6185e89ee607-catalog-content\") pod \"redhat-marketplace-rl7bj\" (UID: \"7a26fe5e-9560-455a-a98e-6185e89ee607\") " pod="openshift-marketplace/redhat-marketplace-rl7bj" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.669066 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a26fe5e-9560-455a-a98e-6185e89ee607-utilities\") pod \"redhat-marketplace-rl7bj\" (UID: \"7a26fe5e-9560-455a-a98e-6185e89ee607\") " pod="openshift-marketplace/redhat-marketplace-rl7bj" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.669097 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nz6sd\" (UniqueName: \"kubernetes.io/projected/7a26fe5e-9560-455a-a98e-6185e89ee607-kube-api-access-nz6sd\") pod \"redhat-marketplace-rl7bj\" (UID: \"7a26fe5e-9560-455a-a98e-6185e89ee607\") " pod="openshift-marketplace/redhat-marketplace-rl7bj" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 
00:08:57.669809 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a26fe5e-9560-455a-a98e-6185e89ee607-catalog-content\") pod \"redhat-marketplace-rl7bj\" (UID: \"7a26fe5e-9560-455a-a98e-6185e89ee607\") " pod="openshift-marketplace/redhat-marketplace-rl7bj" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.670037 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a26fe5e-9560-455a-a98e-6185e89ee607-utilities\") pod \"redhat-marketplace-rl7bj\" (UID: \"7a26fe5e-9560-455a-a98e-6185e89ee607\") " pod="openshift-marketplace/redhat-marketplace-rl7bj" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.689841 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nz6sd\" (UniqueName: \"kubernetes.io/projected/7a26fe5e-9560-455a-a98e-6185e89ee607-kube-api-access-nz6sd\") pod \"redhat-marketplace-rl7bj\" (UID: \"7a26fe5e-9560-455a-a98e-6185e89ee607\") " pod="openshift-marketplace/redhat-marketplace-rl7bj" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.711006 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-01-25 00:03:56 +0000 UTC, rotation deadline is 2026-10-29 01:10:23.083979779 +0000 UTC Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.711032 4985 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 6649h1m25.372950222s for next certificate rotation Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.713173 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" event={"ID":"e3a56e76-0d21-4576-91ec-87099bd8f5e9","Type":"ContainerStarted","Data":"cbc707b1ecdf26c4a61bbfbc3a2d41922a0e46451baa423131add712153c1b80"} Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.713209 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" event={"ID":"e3a56e76-0d21-4576-91ec-87099bd8f5e9","Type":"ContainerStarted","Data":"88ade9b0be0da3fb263b6acd2bfc28adac13ccc5aeb7954b5d52719606cab2b2"} Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.714091 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.719324 4985 generic.go:334] "Generic (PLEG): container finished" podID="4daece71-11c1-4ef2-8cae-ff8e392d1abe" containerID="ea0dca48057794d9f6e4010d2ef243741875e6e15fa2c6706d5a9cf1abe65cc1" exitCode=0 Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.719386 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pp7k8" event={"ID":"4daece71-11c1-4ef2-8cae-ff8e392d1abe","Type":"ContainerDied","Data":"ea0dca48057794d9f6e4010d2ef243741875e6e15fa2c6706d5a9cf1abe65cc1"} Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.719411 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pp7k8" event={"ID":"4daece71-11c1-4ef2-8cae-ff8e392d1abe","Type":"ContainerStarted","Data":"fba643509d226555b244e2ed690fc914b78473fa674d996ffa37ee6af2b1974d"} Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.721930 4985 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.724359 4985 generic.go:334] 
"Generic (PLEG): container finished" podID="5fcaee3d-2838-4823-b0fd-f6285ebfe74c" containerID="07df81b8a29b83053ae66d807233ba3a4a8e4d972515693b38feba46e70e57a2" exitCode=0 Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.724415 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-czq57" event={"ID":"5fcaee3d-2838-4823-b0fd-f6285ebfe74c","Type":"ContainerDied","Data":"07df81b8a29b83053ae66d807233ba3a4a8e4d972515693b38feba46e70e57a2"} Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.724437 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-czq57" event={"ID":"5fcaee3d-2838-4823-b0fd-f6285ebfe74c","Type":"ContainerStarted","Data":"83eb0e9ae8dde25bb90f3a6872d2544296956ae2bcf676cd9e1bac6a6dc5f340"} Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.762806 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" event={"ID":"4c2471c6-f9fd-439f-a0cf-1e4e166ed30b","Type":"ContainerStarted","Data":"b2f699b0c6bbc230f95435fbeb679385317478b5e97d630f6addb64fa5b13ba5"} Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.796439 4985 generic.go:334] "Generic (PLEG): container finished" podID="badea0b3-377c-4171-931a-2fc2a9a07922" containerID="c783582ace5e2ca9589c5935a55083d1cf020cbce2801fcd21b3094fbe91357b" exitCode=0 Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.796544 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-77pvn" event={"ID":"badea0b3-377c-4171-931a-2fc2a9a07922","Type":"ContainerDied","Data":"c783582ace5e2ca9589c5935a55083d1cf020cbce2801fcd21b3094fbe91357b"} Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.796586 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-77pvn" event={"ID":"badea0b3-377c-4171-931a-2fc2a9a07922","Type":"ContainerStarted","Data":"59aafd4f3c9de8ebf94ba598e68a001ec4d574ac12acc413925eed46aa0c8ad4"} Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.797414 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" podStartSLOduration=128.797399041 podStartE2EDuration="2m8.797399041s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:57.749271373 +0000 UTC m=+147.781207656" watchObservedRunningTime="2026-01-25 00:08:57.797399041 +0000 UTC m=+147.829335304" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.825718 4985 generic.go:334] "Generic (PLEG): container finished" podID="841f3be9-8a92-4e9e-af89-ddf60ffc736e" containerID="b5dd1eb0a0fc342dbb99cd5ff3dea91ef4870c6fb22e152c408aaeafe8e911f1" exitCode=0 Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.826605 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nsrmt" event={"ID":"841f3be9-8a92-4e9e-af89-ddf60ffc736e","Type":"ContainerDied","Data":"b5dd1eb0a0fc342dbb99cd5ff3dea91ef4870c6fb22e152c408aaeafe8e911f1"} Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.826627 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nsrmt" event={"ID":"841f3be9-8a92-4e9e-af89-ddf60ffc736e","Type":"ContainerStarted","Data":"4a76a13ede9ca07167e6278fb8c93305b1e73c4e05ffc6ea4c2ebce5d1f1641a"} Jan 25 00:08:57 crc 
kubenswrapper[4985]: I0125 00:08:57.863067 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4q65s"] Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.870306 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4q65s" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.886488 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" podStartSLOduration=129.886470888 podStartE2EDuration="2m9.886470888s" podCreationTimestamp="2026-01-25 00:06:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:08:57.88193988 +0000 UTC m=+147.913876153" watchObservedRunningTime="2026-01-25 00:08:57.886470888 +0000 UTC m=+147.918407161" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.901785 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4q65s"] Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.902853 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rl7bj" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.983509 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebb7e09a-b77f-4c72-b892-177ebd17417c-utilities\") pod \"redhat-marketplace-4q65s\" (UID: \"ebb7e09a-b77f-4c72-b892-177ebd17417c\") " pod="openshift-marketplace/redhat-marketplace-4q65s" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.983596 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebb7e09a-b77f-4c72-b892-177ebd17417c-catalog-content\") pod \"redhat-marketplace-4q65s\" (UID: \"ebb7e09a-b77f-4c72-b892-177ebd17417c\") " pod="openshift-marketplace/redhat-marketplace-4q65s" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.983755 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vg4fk\" (UniqueName: \"kubernetes.io/projected/ebb7e09a-b77f-4c72-b892-177ebd17417c-kube-api-access-vg4fk\") pod \"redhat-marketplace-4q65s\" (UID: \"ebb7e09a-b77f-4c72-b892-177ebd17417c\") " pod="openshift-marketplace/redhat-marketplace-4q65s" Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.988546 4985 patch_prober.go:28] interesting pod/router-default-5444994796-5z29b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 25 00:08:57 crc kubenswrapper[4985]: [-]has-synced failed: reason withheld Jan 25 00:08:57 crc kubenswrapper[4985]: [+]process-running ok Jan 25 00:08:57 crc kubenswrapper[4985]: healthz check failed Jan 25 00:08:57 crc kubenswrapper[4985]: I0125 00:08:57.988590 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5z29b" podUID="2bfd38dc-27e5-4906-a593-ea58e49340b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.084913 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vg4fk\" (UniqueName: 
\"kubernetes.io/projected/ebb7e09a-b77f-4c72-b892-177ebd17417c-kube-api-access-vg4fk\") pod \"redhat-marketplace-4q65s\" (UID: \"ebb7e09a-b77f-4c72-b892-177ebd17417c\") " pod="openshift-marketplace/redhat-marketplace-4q65s" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.085014 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebb7e09a-b77f-4c72-b892-177ebd17417c-utilities\") pod \"redhat-marketplace-4q65s\" (UID: \"ebb7e09a-b77f-4c72-b892-177ebd17417c\") " pod="openshift-marketplace/redhat-marketplace-4q65s" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.085044 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebb7e09a-b77f-4c72-b892-177ebd17417c-catalog-content\") pod \"redhat-marketplace-4q65s\" (UID: \"ebb7e09a-b77f-4c72-b892-177ebd17417c\") " pod="openshift-marketplace/redhat-marketplace-4q65s" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.085452 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebb7e09a-b77f-4c72-b892-177ebd17417c-catalog-content\") pod \"redhat-marketplace-4q65s\" (UID: \"ebb7e09a-b77f-4c72-b892-177ebd17417c\") " pod="openshift-marketplace/redhat-marketplace-4q65s" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.085926 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebb7e09a-b77f-4c72-b892-177ebd17417c-utilities\") pod \"redhat-marketplace-4q65s\" (UID: \"ebb7e09a-b77f-4c72-b892-177ebd17417c\") " pod="openshift-marketplace/redhat-marketplace-4q65s" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.126975 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vg4fk\" (UniqueName: \"kubernetes.io/projected/ebb7e09a-b77f-4c72-b892-177ebd17417c-kube-api-access-vg4fk\") pod \"redhat-marketplace-4q65s\" (UID: \"ebb7e09a-b77f-4c72-b892-177ebd17417c\") " pod="openshift-marketplace/redhat-marketplace-4q65s" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.203033 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.203993 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.244132 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.261440 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4q65s" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.297325 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.345810 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rl7bj"] Jan 25 00:08:58 crc kubenswrapper[4985]: W0125 00:08:58.361093 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7a26fe5e_9560_455a_a98e_6185e89ee607.slice/crio-12cf13d4b13d34c74562aa115d78e7dff784edafebd18ff6a62677963cecacc9 WatchSource:0}: Error finding container 12cf13d4b13d34c74562aa115d78e7dff784edafebd18ff6a62677963cecacc9: Status 404 returned error can't find the container with id 12cf13d4b13d34c74562aa115d78e7dff784edafebd18ff6a62677963cecacc9 Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.450599 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-zcf2n"] Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.459283 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zcf2n" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.462137 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zcf2n"] Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.463490 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.467395 4985 patch_prober.go:28] interesting pod/downloads-7954f5f757-54sg5 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused" start-of-body= Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.467438 4985 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-54sg5" podUID="ae20ca57-847f-4344-9718-aa179543b4ae" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.467526 4985 patch_prober.go:28] interesting pod/downloads-7954f5f757-54sg5 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused" start-of-body= Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.467565 4985 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-54sg5" podUID="ae20ca57-847f-4344-9718-aa179543b4ae" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.487947 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.487981 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:08:58 crc 
kubenswrapper[4985]: I0125 00:08:58.501492 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edab77a9-c9b1-44b8-8b21-275fc3bcdd81-catalog-content\") pod \"redhat-operators-zcf2n\" (UID: \"edab77a9-c9b1-44b8-8b21-275fc3bcdd81\") " pod="openshift-marketplace/redhat-operators-zcf2n" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.501545 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edab77a9-c9b1-44b8-8b21-275fc3bcdd81-utilities\") pod \"redhat-operators-zcf2n\" (UID: \"edab77a9-c9b1-44b8-8b21-275fc3bcdd81\") " pod="openshift-marketplace/redhat-operators-zcf2n" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.501626 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z89zm\" (UniqueName: \"kubernetes.io/projected/edab77a9-c9b1-44b8-8b21-275fc3bcdd81-kube-api-access-z89zm\") pod \"redhat-operators-zcf2n\" (UID: \"edab77a9-c9b1-44b8-8b21-275fc3bcdd81\") " pod="openshift-marketplace/redhat-operators-zcf2n" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.502096 4985 patch_prober.go:28] interesting pod/console-f9d7485db-54vvw container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.35:8443/health\": dial tcp 10.217.0.35:8443: connect: connection refused" start-of-body= Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.502188 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-54vvw" podUID="5dd173ed-ab63-4006-9e39-2f4abf301a8e" containerName="console" probeResult="failure" output="Get \"https://10.217.0.35:8443/health\": dial tcp 10.217.0.35:8443: connect: connection refused" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.510061 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4q65s"] Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.515228 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.515274 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.603305 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z89zm\" (UniqueName: \"kubernetes.io/projected/edab77a9-c9b1-44b8-8b21-275fc3bcdd81-kube-api-access-z89zm\") pod \"redhat-operators-zcf2n\" (UID: \"edab77a9-c9b1-44b8-8b21-275fc3bcdd81\") " pod="openshift-marketplace/redhat-operators-zcf2n" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.603362 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edab77a9-c9b1-44b8-8b21-275fc3bcdd81-catalog-content\") pod \"redhat-operators-zcf2n\" (UID: \"edab77a9-c9b1-44b8-8b21-275fc3bcdd81\") " pod="openshift-marketplace/redhat-operators-zcf2n" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.603416 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edab77a9-c9b1-44b8-8b21-275fc3bcdd81-utilities\") pod \"redhat-operators-zcf2n\" (UID: 
\"edab77a9-c9b1-44b8-8b21-275fc3bcdd81\") " pod="openshift-marketplace/redhat-operators-zcf2n" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.604703 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edab77a9-c9b1-44b8-8b21-275fc3bcdd81-catalog-content\") pod \"redhat-operators-zcf2n\" (UID: \"edab77a9-c9b1-44b8-8b21-275fc3bcdd81\") " pod="openshift-marketplace/redhat-operators-zcf2n" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.604809 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edab77a9-c9b1-44b8-8b21-275fc3bcdd81-utilities\") pod \"redhat-operators-zcf2n\" (UID: \"edab77a9-c9b1-44b8-8b21-275fc3bcdd81\") " pod="openshift-marketplace/redhat-operators-zcf2n" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.624592 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z89zm\" (UniqueName: \"kubernetes.io/projected/edab77a9-c9b1-44b8-8b21-275fc3bcdd81-kube-api-access-z89zm\") pod \"redhat-operators-zcf2n\" (UID: \"edab77a9-c9b1-44b8-8b21-275fc3bcdd81\") " pod="openshift-marketplace/redhat-operators-zcf2n" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.780514 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zcf2n" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.843761 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-d5j6f"] Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.845028 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d5j6f" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.847770 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rl7bj" event={"ID":"7a26fe5e-9560-455a-a98e-6185e89ee607","Type":"ContainerStarted","Data":"12cf13d4b13d34c74562aa115d78e7dff784edafebd18ff6a62677963cecacc9"} Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.850127 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"bc1204ea89aa6552eac5adc7f89a21f34a28e62f2a6b3cae76dc1813f041c57f"} Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.850154 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"4dcf2ab06fc16b8b5d8aa2a183db8b1c8098d28e45969fb2122b376ea5e6aeb9"} Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.850288 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.857329 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-d5j6f"] Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.859175 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"6fada835b0a275e7b366a03b2968c98dd4fdbd76ed983c1471374191d9bc357c"} Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.859212 
4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"4a536a7b83b670291ce07306b7249a68bdf3559269c7983b30d7f362825e3957"} Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.887841 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"306bc1deadd0783bcca92e032fcf30853a7fdd262c0edd294e7e703f8467d7a7"} Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.887972 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"5229bc32f1b5fa8ae7ebc42585c278b796d3318418d77bd884213c828273aad4"} Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.904641 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4q65s" event={"ID":"ebb7e09a-b77f-4c72-b892-177ebd17417c","Type":"ContainerStarted","Data":"92a0e0d5e970e476c4df347560232771c4036c126f762712ae2caaa9d24a8a7d"} Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.906704 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cjkcr\" (UniqueName: \"kubernetes.io/projected/5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1-kube-api-access-cjkcr\") pod \"redhat-operators-d5j6f\" (UID: \"5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1\") " pod="openshift-marketplace/redhat-operators-d5j6f" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.906835 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1-utilities\") pod \"redhat-operators-d5j6f\" (UID: \"5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1\") " pod="openshift-marketplace/redhat-operators-d5j6f" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.907018 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1-catalog-content\") pod \"redhat-operators-d5j6f\" (UID: \"5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1\") " pod="openshift-marketplace/redhat-operators-d5j6f" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.924318 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-jql78" Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.998998 4985 patch_prober.go:28] interesting pod/router-default-5444994796-5z29b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 25 00:08:58 crc kubenswrapper[4985]: [-]has-synced failed: reason withheld Jan 25 00:08:58 crc kubenswrapper[4985]: [+]process-running ok Jan 25 00:08:58 crc kubenswrapper[4985]: healthz check failed Jan 25 00:08:58 crc kubenswrapper[4985]: I0125 00:08:58.999052 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5z29b" podUID="2bfd38dc-27e5-4906-a593-ea58e49340b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 
25 00:08:59 crc kubenswrapper[4985]: I0125 00:08:59.014122 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1-catalog-content\") pod \"redhat-operators-d5j6f\" (UID: \"5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1\") " pod="openshift-marketplace/redhat-operators-d5j6f" Jan 25 00:08:59 crc kubenswrapper[4985]: I0125 00:08:59.014216 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cjkcr\" (UniqueName: \"kubernetes.io/projected/5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1-kube-api-access-cjkcr\") pod \"redhat-operators-d5j6f\" (UID: \"5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1\") " pod="openshift-marketplace/redhat-operators-d5j6f" Jan 25 00:08:59 crc kubenswrapper[4985]: I0125 00:08:59.014271 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1-utilities\") pod \"redhat-operators-d5j6f\" (UID: \"5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1\") " pod="openshift-marketplace/redhat-operators-d5j6f" Jan 25 00:08:59 crc kubenswrapper[4985]: I0125 00:08:59.015979 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1-catalog-content\") pod \"redhat-operators-d5j6f\" (UID: \"5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1\") " pod="openshift-marketplace/redhat-operators-d5j6f" Jan 25 00:08:59 crc kubenswrapper[4985]: I0125 00:08:59.028520 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1-utilities\") pod \"redhat-operators-d5j6f\" (UID: \"5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1\") " pod="openshift-marketplace/redhat-operators-d5j6f" Jan 25 00:08:59 crc kubenswrapper[4985]: I0125 00:08:59.075092 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cjkcr\" (UniqueName: \"kubernetes.io/projected/5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1-kube-api-access-cjkcr\") pod \"redhat-operators-d5j6f\" (UID: \"5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1\") " pod="openshift-marketplace/redhat-operators-d5j6f" Jan 25 00:08:59 crc kubenswrapper[4985]: I0125 00:08:59.208136 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-d5j6f" Jan 25 00:08:59 crc kubenswrapper[4985]: I0125 00:08:59.289480 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-zcf2n"] Jan 25 00:08:59 crc kubenswrapper[4985]: I0125 00:08:59.493672 4985 patch_prober.go:28] interesting pod/apiserver-76f77b778f-ctsj5 container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Jan 25 00:08:59 crc kubenswrapper[4985]: [+]log ok Jan 25 00:08:59 crc kubenswrapper[4985]: [+]etcd ok Jan 25 00:08:59 crc kubenswrapper[4985]: [+]poststarthook/start-apiserver-admission-initializer ok Jan 25 00:08:59 crc kubenswrapper[4985]: [+]poststarthook/generic-apiserver-start-informers ok Jan 25 00:08:59 crc kubenswrapper[4985]: [+]poststarthook/max-in-flight-filter ok Jan 25 00:08:59 crc kubenswrapper[4985]: [+]poststarthook/storage-object-count-tracker-hook ok Jan 25 00:08:59 crc kubenswrapper[4985]: [+]poststarthook/image.openshift.io-apiserver-caches ok Jan 25 00:08:59 crc kubenswrapper[4985]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Jan 25 00:08:59 crc kubenswrapper[4985]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Jan 25 00:08:59 crc kubenswrapper[4985]: [+]poststarthook/project.openshift.io-projectcache ok Jan 25 00:08:59 crc kubenswrapper[4985]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Jan 25 00:08:59 crc kubenswrapper[4985]: [+]poststarthook/openshift.io-startinformers ok Jan 25 00:08:59 crc kubenswrapper[4985]: [+]poststarthook/openshift.io-restmapperupdater ok Jan 25 00:08:59 crc kubenswrapper[4985]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Jan 25 00:08:59 crc kubenswrapper[4985]: livez check failed Jan 25 00:08:59 crc kubenswrapper[4985]: I0125 00:08:59.493948 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" podUID="4c2471c6-f9fd-439f-a0cf-1e4e166ed30b" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 25 00:08:59 crc kubenswrapper[4985]: I0125 00:08:59.652756 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-d5j6f"] Jan 25 00:08:59 crc kubenswrapper[4985]: W0125 00:08:59.688562 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5b7e13c0_2e33_4cf9_9cec_7b6481b29bd1.slice/crio-57b8a88fd104b2fb3353e1ab6fe7017fb5783c1320e6b0830fb0880b680d0a88 WatchSource:0}: Error finding container 57b8a88fd104b2fb3353e1ab6fe7017fb5783c1320e6b0830fb0880b680d0a88: Status 404 returned error can't find the container with id 57b8a88fd104b2fb3353e1ab6fe7017fb5783c1320e6b0830fb0880b680d0a88 Jan 25 00:08:59 crc kubenswrapper[4985]: I0125 00:08:59.908412 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d5j6f" event={"ID":"5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1","Type":"ContainerStarted","Data":"57b8a88fd104b2fb3353e1ab6fe7017fb5783c1320e6b0830fb0880b680d0a88"} Jan 25 00:08:59 crc kubenswrapper[4985]: I0125 00:08:59.916783 4985 generic.go:334] "Generic (PLEG): container finished" podID="edab77a9-c9b1-44b8-8b21-275fc3bcdd81" containerID="440d2b852422606c3cbb845bee9e0dd025642c0f2dc4c33816f967e1146d3b6b" exitCode=0 Jan 25 00:08:59 crc 
kubenswrapper[4985]: I0125 00:08:59.916848 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zcf2n" event={"ID":"edab77a9-c9b1-44b8-8b21-275fc3bcdd81","Type":"ContainerDied","Data":"440d2b852422606c3cbb845bee9e0dd025642c0f2dc4c33816f967e1146d3b6b"} Jan 25 00:08:59 crc kubenswrapper[4985]: I0125 00:08:59.916875 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zcf2n" event={"ID":"edab77a9-c9b1-44b8-8b21-275fc3bcdd81","Type":"ContainerStarted","Data":"9d9637879b810b11d0d0bd4a6a62821363396ee1f0b64b4916b37a0f608e1536"} Jan 25 00:08:59 crc kubenswrapper[4985]: I0125 00:08:59.943209 4985 generic.go:334] "Generic (PLEG): container finished" podID="ebb7e09a-b77f-4c72-b892-177ebd17417c" containerID="aa99e429218be279a7fb3f9296fd1c4bdce6196b344e5f85731a8241d93f4b44" exitCode=0 Jan 25 00:08:59 crc kubenswrapper[4985]: I0125 00:08:59.943514 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4q65s" event={"ID":"ebb7e09a-b77f-4c72-b892-177ebd17417c","Type":"ContainerDied","Data":"aa99e429218be279a7fb3f9296fd1c4bdce6196b344e5f85731a8241d93f4b44"} Jan 25 00:08:59 crc kubenswrapper[4985]: I0125 00:08:59.963326 4985 generic.go:334] "Generic (PLEG): container finished" podID="7a26fe5e-9560-455a-a98e-6185e89ee607" containerID="7bb78067cf47a56786a52f308497260140f11f6f6fca161eacb536e923d31ee4" exitCode=0 Jan 25 00:08:59 crc kubenswrapper[4985]: I0125 00:08:59.963456 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rl7bj" event={"ID":"7a26fe5e-9560-455a-a98e-6185e89ee607","Type":"ContainerDied","Data":"7bb78067cf47a56786a52f308497260140f11f6f6fca161eacb536e923d31ee4"} Jan 25 00:08:59 crc kubenswrapper[4985]: I0125 00:08:59.989514 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-5z29b" Jan 25 00:08:59 crc kubenswrapper[4985]: I0125 00:08:59.993377 4985 patch_prober.go:28] interesting pod/router-default-5444994796-5z29b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 25 00:08:59 crc kubenswrapper[4985]: [-]has-synced failed: reason withheld Jan 25 00:08:59 crc kubenswrapper[4985]: [+]process-running ok Jan 25 00:08:59 crc kubenswrapper[4985]: healthz check failed Jan 25 00:08:59 crc kubenswrapper[4985]: I0125 00:08:59.993416 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5z29b" podUID="2bfd38dc-27e5-4906-a593-ea58e49340b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 25 00:08:59 crc kubenswrapper[4985]: I0125 00:08:59.994600 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 25 00:08:59 crc kubenswrapper[4985]: I0125 00:08:59.995556 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 25 00:08:59 crc kubenswrapper[4985]: I0125 00:08:59.999062 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Jan 25 00:09:00 crc kubenswrapper[4985]: I0125 00:08:59.999717 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Jan 25 00:09:00 crc kubenswrapper[4985]: I0125 00:09:00.011811 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 25 00:09:00 crc kubenswrapper[4985]: I0125 00:09:00.141443 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1ab4c861-cbe1-488d-888c-53221ba05a24-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1ab4c861-cbe1-488d-888c-53221ba05a24\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 25 00:09:00 crc kubenswrapper[4985]: I0125 00:09:00.141585 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1ab4c861-cbe1-488d-888c-53221ba05a24-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1ab4c861-cbe1-488d-888c-53221ba05a24\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 25 00:09:00 crc kubenswrapper[4985]: I0125 00:09:00.242473 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1ab4c861-cbe1-488d-888c-53221ba05a24-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1ab4c861-cbe1-488d-888c-53221ba05a24\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 25 00:09:00 crc kubenswrapper[4985]: I0125 00:09:00.242531 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1ab4c861-cbe1-488d-888c-53221ba05a24-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1ab4c861-cbe1-488d-888c-53221ba05a24\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 25 00:09:00 crc kubenswrapper[4985]: I0125 00:09:00.242692 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1ab4c861-cbe1-488d-888c-53221ba05a24-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"1ab4c861-cbe1-488d-888c-53221ba05a24\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 25 00:09:00 crc kubenswrapper[4985]: I0125 00:09:00.290794 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1ab4c861-cbe1-488d-888c-53221ba05a24-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"1ab4c861-cbe1-488d-888c-53221ba05a24\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 25 00:09:00 crc kubenswrapper[4985]: I0125 00:09:00.446230 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 25 00:09:00 crc kubenswrapper[4985]: I0125 00:09:00.663262 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 25 00:09:00 crc kubenswrapper[4985]: I0125 00:09:00.664288 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 25 00:09:00 crc kubenswrapper[4985]: I0125 00:09:00.666486 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 25 00:09:00 crc kubenswrapper[4985]: I0125 00:09:00.666523 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 25 00:09:00 crc kubenswrapper[4985]: I0125 00:09:00.679070 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 25 00:09:00 crc kubenswrapper[4985]: I0125 00:09:00.768817 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/efb9d4d4-9c08-4eca-a43d-c7ee737f1788-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"efb9d4d4-9c08-4eca-a43d-c7ee737f1788\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 25 00:09:00 crc kubenswrapper[4985]: I0125 00:09:00.768908 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/efb9d4d4-9c08-4eca-a43d-c7ee737f1788-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"efb9d4d4-9c08-4eca-a43d-c7ee737f1788\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 25 00:09:00 crc kubenswrapper[4985]: I0125 00:09:00.871479 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/efb9d4d4-9c08-4eca-a43d-c7ee737f1788-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"efb9d4d4-9c08-4eca-a43d-c7ee737f1788\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 25 00:09:00 crc kubenswrapper[4985]: I0125 00:09:00.871560 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/efb9d4d4-9c08-4eca-a43d-c7ee737f1788-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"efb9d4d4-9c08-4eca-a43d-c7ee737f1788\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 25 00:09:00 crc kubenswrapper[4985]: I0125 00:09:00.871639 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/efb9d4d4-9c08-4eca-a43d-c7ee737f1788-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"efb9d4d4-9c08-4eca-a43d-c7ee737f1788\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 25 00:09:00 crc kubenswrapper[4985]: I0125 00:09:00.902158 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/efb9d4d4-9c08-4eca-a43d-c7ee737f1788-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"efb9d4d4-9c08-4eca-a43d-c7ee737f1788\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 25 00:09:00 crc kubenswrapper[4985]: I0125 00:09:00.971761 4985 generic.go:334] "Generic (PLEG): container finished" podID="5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1" containerID="1b438f54de5582883bcedba166ed6d5b619c4299b0f46aab12ed308361250667" exitCode=0 Jan 25 00:09:00 crc kubenswrapper[4985]: I0125 00:09:00.971889 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d5j6f" event={"ID":"5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1","Type":"ContainerDied","Data":"1b438f54de5582883bcedba166ed6d5b619c4299b0f46aab12ed308361250667"} Jan 25 
00:09:00 crc kubenswrapper[4985]: I0125 00:09:00.990581 4985 patch_prober.go:28] interesting pod/router-default-5444994796-5z29b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 25 00:09:00 crc kubenswrapper[4985]: [-]has-synced failed: reason withheld Jan 25 00:09:00 crc kubenswrapper[4985]: [+]process-running ok Jan 25 00:09:00 crc kubenswrapper[4985]: healthz check failed Jan 25 00:09:00 crc kubenswrapper[4985]: I0125 00:09:00.990632 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5z29b" podUID="2bfd38dc-27e5-4906-a593-ea58e49340b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 25 00:09:01 crc kubenswrapper[4985]: I0125 00:09:01.009277 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 25 00:09:01 crc kubenswrapper[4985]: I0125 00:09:01.112165 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 25 00:09:01 crc kubenswrapper[4985]: W0125 00:09:01.121395 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod1ab4c861_cbe1_488d_888c_53221ba05a24.slice/crio-ca59e827b92eb4468f8f0a249fad6c4f38798543730c53b414724190e5e31d8a WatchSource:0}: Error finding container ca59e827b92eb4468f8f0a249fad6c4f38798543730c53b414724190e5e31d8a: Status 404 returned error can't find the container with id ca59e827b92eb4468f8f0a249fad6c4f38798543730c53b414724190e5e31d8a Jan 25 00:09:01 crc kubenswrapper[4985]: I0125 00:09:01.331963 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 25 00:09:01 crc kubenswrapper[4985]: W0125 00:09:01.382248 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podefb9d4d4_9c08_4eca_a43d_c7ee737f1788.slice/crio-c9e8fd57e76642cc231e24bb317410ec85ac3b3196edecc0fd21d8ac45b33d30 WatchSource:0}: Error finding container c9e8fd57e76642cc231e24bb317410ec85ac3b3196edecc0fd21d8ac45b33d30: Status 404 returned error can't find the container with id c9e8fd57e76642cc231e24bb317410ec85ac3b3196edecc0fd21d8ac45b33d30 Jan 25 00:09:01 crc kubenswrapper[4985]: I0125 00:09:01.986887 4985 generic.go:334] "Generic (PLEG): container finished" podID="677a7eeb-960f-4771-bd2f-9fedef723ffd" containerID="0e28419ef1322ce2013cd0a9571a6561de255ab41ba17ed38540c86fd3fadae5" exitCode=0 Jan 25 00:09:01 crc kubenswrapper[4985]: I0125 00:09:01.987149 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29488320-8l78v" event={"ID":"677a7eeb-960f-4771-bd2f-9fedef723ffd","Type":"ContainerDied","Data":"0e28419ef1322ce2013cd0a9571a6561de255ab41ba17ed38540c86fd3fadae5"} Jan 25 00:09:01 crc kubenswrapper[4985]: I0125 00:09:01.990753 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1ab4c861-cbe1-488d-888c-53221ba05a24","Type":"ContainerStarted","Data":"70851db50335f39a64d1edfd929e7982b59996b1a2c9e980d6cca8105e52eb94"} Jan 25 00:09:01 crc kubenswrapper[4985]: I0125 00:09:01.990795 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" 
event={"ID":"1ab4c861-cbe1-488d-888c-53221ba05a24","Type":"ContainerStarted","Data":"ca59e827b92eb4468f8f0a249fad6c4f38798543730c53b414724190e5e31d8a"} Jan 25 00:09:01 crc kubenswrapper[4985]: I0125 00:09:01.992820 4985 patch_prober.go:28] interesting pod/router-default-5444994796-5z29b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 25 00:09:01 crc kubenswrapper[4985]: [-]has-synced failed: reason withheld Jan 25 00:09:01 crc kubenswrapper[4985]: [+]process-running ok Jan 25 00:09:01 crc kubenswrapper[4985]: healthz check failed Jan 25 00:09:01 crc kubenswrapper[4985]: I0125 00:09:01.992868 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5z29b" podUID="2bfd38dc-27e5-4906-a593-ea58e49340b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 25 00:09:01 crc kubenswrapper[4985]: I0125 00:09:01.994083 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"efb9d4d4-9c08-4eca-a43d-c7ee737f1788","Type":"ContainerStarted","Data":"c9e8fd57e76642cc231e24bb317410ec85ac3b3196edecc0fd21d8ac45b33d30"} Jan 25 00:09:02 crc kubenswrapper[4985]: I0125 00:09:02.036143 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=3.036127119 podStartE2EDuration="3.036127119s" podCreationTimestamp="2026-01-25 00:08:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:09:02.030895963 +0000 UTC m=+152.062832256" watchObservedRunningTime="2026-01-25 00:09:02.036127119 +0000 UTC m=+152.068063392" Jan 25 00:09:02 crc kubenswrapper[4985]: I0125 00:09:02.984131 4985 patch_prober.go:28] interesting pod/router-default-5444994796-5z29b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 25 00:09:02 crc kubenswrapper[4985]: [-]has-synced failed: reason withheld Jan 25 00:09:02 crc kubenswrapper[4985]: [+]process-running ok Jan 25 00:09:02 crc kubenswrapper[4985]: healthz check failed Jan 25 00:09:02 crc kubenswrapper[4985]: I0125 00:09:02.984390 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5z29b" podUID="2bfd38dc-27e5-4906-a593-ea58e49340b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 25 00:09:03 crc kubenswrapper[4985]: I0125 00:09:03.070274 4985 generic.go:334] "Generic (PLEG): container finished" podID="efb9d4d4-9c08-4eca-a43d-c7ee737f1788" containerID="f64abd51fb4300e594040f6c3bed57662f79e46b594be47e0a7b06d11b4d54d2" exitCode=0 Jan 25 00:09:03 crc kubenswrapper[4985]: I0125 00:09:03.071114 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"efb9d4d4-9c08-4eca-a43d-c7ee737f1788","Type":"ContainerDied","Data":"f64abd51fb4300e594040f6c3bed57662f79e46b594be47e0a7b06d11b4d54d2"} Jan 25 00:09:03 crc kubenswrapper[4985]: I0125 00:09:03.086044 4985 generic.go:334] "Generic (PLEG): container finished" podID="1ab4c861-cbe1-488d-888c-53221ba05a24" containerID="70851db50335f39a64d1edfd929e7982b59996b1a2c9e980d6cca8105e52eb94" exitCode=0 
Jan 25 00:09:03 crc kubenswrapper[4985]: I0125 00:09:03.086362 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1ab4c861-cbe1-488d-888c-53221ba05a24","Type":"ContainerDied","Data":"70851db50335f39a64d1edfd929e7982b59996b1a2c9e980d6cca8105e52eb94"} Jan 25 00:09:03 crc kubenswrapper[4985]: I0125 00:09:03.527584 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:09:03 crc kubenswrapper[4985]: I0125 00:09:03.536880 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-ctsj5" Jan 25 00:09:03 crc kubenswrapper[4985]: I0125 00:09:03.565205 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29488320-8l78v" Jan 25 00:09:03 crc kubenswrapper[4985]: I0125 00:09:03.747992 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/677a7eeb-960f-4771-bd2f-9fedef723ffd-config-volume\") pod \"677a7eeb-960f-4771-bd2f-9fedef723ffd\" (UID: \"677a7eeb-960f-4771-bd2f-9fedef723ffd\") " Jan 25 00:09:03 crc kubenswrapper[4985]: I0125 00:09:03.748088 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brg6q\" (UniqueName: \"kubernetes.io/projected/677a7eeb-960f-4771-bd2f-9fedef723ffd-kube-api-access-brg6q\") pod \"677a7eeb-960f-4771-bd2f-9fedef723ffd\" (UID: \"677a7eeb-960f-4771-bd2f-9fedef723ffd\") " Jan 25 00:09:03 crc kubenswrapper[4985]: I0125 00:09:03.748196 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/677a7eeb-960f-4771-bd2f-9fedef723ffd-secret-volume\") pod \"677a7eeb-960f-4771-bd2f-9fedef723ffd\" (UID: \"677a7eeb-960f-4771-bd2f-9fedef723ffd\") " Jan 25 00:09:03 crc kubenswrapper[4985]: I0125 00:09:03.749090 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/677a7eeb-960f-4771-bd2f-9fedef723ffd-config-volume" (OuterVolumeSpecName: "config-volume") pod "677a7eeb-960f-4771-bd2f-9fedef723ffd" (UID: "677a7eeb-960f-4771-bd2f-9fedef723ffd"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:09:03 crc kubenswrapper[4985]: I0125 00:09:03.770497 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/677a7eeb-960f-4771-bd2f-9fedef723ffd-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "677a7eeb-960f-4771-bd2f-9fedef723ffd" (UID: "677a7eeb-960f-4771-bd2f-9fedef723ffd"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:09:03 crc kubenswrapper[4985]: I0125 00:09:03.771731 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/677a7eeb-960f-4771-bd2f-9fedef723ffd-kube-api-access-brg6q" (OuterVolumeSpecName: "kube-api-access-brg6q") pod "677a7eeb-960f-4771-bd2f-9fedef723ffd" (UID: "677a7eeb-960f-4771-bd2f-9fedef723ffd"). InnerVolumeSpecName "kube-api-access-brg6q". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:09:03 crc kubenswrapper[4985]: I0125 00:09:03.849696 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brg6q\" (UniqueName: \"kubernetes.io/projected/677a7eeb-960f-4771-bd2f-9fedef723ffd-kube-api-access-brg6q\") on node \"crc\" DevicePath \"\"" Jan 25 00:09:03 crc kubenswrapper[4985]: I0125 00:09:03.849732 4985 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/677a7eeb-960f-4771-bd2f-9fedef723ffd-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 25 00:09:03 crc kubenswrapper[4985]: I0125 00:09:03.849745 4985 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/677a7eeb-960f-4771-bd2f-9fedef723ffd-config-volume\") on node \"crc\" DevicePath \"\"" Jan 25 00:09:03 crc kubenswrapper[4985]: I0125 00:09:03.985031 4985 patch_prober.go:28] interesting pod/router-default-5444994796-5z29b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 25 00:09:03 crc kubenswrapper[4985]: [-]has-synced failed: reason withheld Jan 25 00:09:03 crc kubenswrapper[4985]: [+]process-running ok Jan 25 00:09:03 crc kubenswrapper[4985]: healthz check failed Jan 25 00:09:03 crc kubenswrapper[4985]: I0125 00:09:03.985081 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5z29b" podUID="2bfd38dc-27e5-4906-a593-ea58e49340b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 25 00:09:04 crc kubenswrapper[4985]: I0125 00:09:04.120437 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29488320-8l78v" event={"ID":"677a7eeb-960f-4771-bd2f-9fedef723ffd","Type":"ContainerDied","Data":"9c33d93fa12e596472a04fd501667841b5cf352166bb912b2c8743661317cf8a"} Jan 25 00:09:04 crc kubenswrapper[4985]: I0125 00:09:04.120491 4985 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9c33d93fa12e596472a04fd501667841b5cf352166bb912b2c8743661317cf8a" Jan 25 00:09:04 crc kubenswrapper[4985]: I0125 00:09:04.120566 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29488320-8l78v" Jan 25 00:09:04 crc kubenswrapper[4985]: I0125 00:09:04.479625 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 25 00:09:04 crc kubenswrapper[4985]: I0125 00:09:04.571609 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 25 00:09:04 crc kubenswrapper[4985]: I0125 00:09:04.665845 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1ab4c861-cbe1-488d-888c-53221ba05a24-kube-api-access\") pod \"1ab4c861-cbe1-488d-888c-53221ba05a24\" (UID: \"1ab4c861-cbe1-488d-888c-53221ba05a24\") " Jan 25 00:09:04 crc kubenswrapper[4985]: I0125 00:09:04.665887 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/efb9d4d4-9c08-4eca-a43d-c7ee737f1788-kubelet-dir\") pod \"efb9d4d4-9c08-4eca-a43d-c7ee737f1788\" (UID: \"efb9d4d4-9c08-4eca-a43d-c7ee737f1788\") " Jan 25 00:09:04 crc kubenswrapper[4985]: I0125 00:09:04.665920 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/efb9d4d4-9c08-4eca-a43d-c7ee737f1788-kube-api-access\") pod \"efb9d4d4-9c08-4eca-a43d-c7ee737f1788\" (UID: \"efb9d4d4-9c08-4eca-a43d-c7ee737f1788\") " Jan 25 00:09:04 crc kubenswrapper[4985]: I0125 00:09:04.665989 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1ab4c861-cbe1-488d-888c-53221ba05a24-kubelet-dir\") pod \"1ab4c861-cbe1-488d-888c-53221ba05a24\" (UID: \"1ab4c861-cbe1-488d-888c-53221ba05a24\") " Jan 25 00:09:04 crc kubenswrapper[4985]: I0125 00:09:04.666295 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/efb9d4d4-9c08-4eca-a43d-c7ee737f1788-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "efb9d4d4-9c08-4eca-a43d-c7ee737f1788" (UID: "efb9d4d4-9c08-4eca-a43d-c7ee737f1788"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:09:04 crc kubenswrapper[4985]: I0125 00:09:04.666546 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1ab4c861-cbe1-488d-888c-53221ba05a24-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "1ab4c861-cbe1-488d-888c-53221ba05a24" (UID: "1ab4c861-cbe1-488d-888c-53221ba05a24"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:09:04 crc kubenswrapper[4985]: I0125 00:09:04.667001 4985 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1ab4c861-cbe1-488d-888c-53221ba05a24-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 25 00:09:04 crc kubenswrapper[4985]: I0125 00:09:04.667018 4985 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/efb9d4d4-9c08-4eca-a43d-c7ee737f1788-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 25 00:09:04 crc kubenswrapper[4985]: I0125 00:09:04.681259 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ab4c861-cbe1-488d-888c-53221ba05a24-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1ab4c861-cbe1-488d-888c-53221ba05a24" (UID: "1ab4c861-cbe1-488d-888c-53221ba05a24"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:09:04 crc kubenswrapper[4985]: I0125 00:09:04.681436 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efb9d4d4-9c08-4eca-a43d-c7ee737f1788-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "efb9d4d4-9c08-4eca-a43d-c7ee737f1788" (UID: "efb9d4d4-9c08-4eca-a43d-c7ee737f1788"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:09:04 crc kubenswrapper[4985]: I0125 00:09:04.770253 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1ab4c861-cbe1-488d-888c-53221ba05a24-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 25 00:09:04 crc kubenswrapper[4985]: I0125 00:09:04.770288 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/efb9d4d4-9c08-4eca-a43d-c7ee737f1788-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 25 00:09:04 crc kubenswrapper[4985]: I0125 00:09:04.938385 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-dflg7" Jan 25 00:09:05 crc kubenswrapper[4985]: I0125 00:09:05.000499 4985 patch_prober.go:28] interesting pod/router-default-5444994796-5z29b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 25 00:09:05 crc kubenswrapper[4985]: [-]has-synced failed: reason withheld Jan 25 00:09:05 crc kubenswrapper[4985]: [+]process-running ok Jan 25 00:09:05 crc kubenswrapper[4985]: healthz check failed Jan 25 00:09:05 crc kubenswrapper[4985]: I0125 00:09:05.000838 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5z29b" podUID="2bfd38dc-27e5-4906-a593-ea58e49340b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 25 00:09:05 crc kubenswrapper[4985]: I0125 00:09:05.135711 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"1ab4c861-cbe1-488d-888c-53221ba05a24","Type":"ContainerDied","Data":"ca59e827b92eb4468f8f0a249fad6c4f38798543730c53b414724190e5e31d8a"} Jan 25 00:09:05 crc kubenswrapper[4985]: I0125 00:09:05.135750 4985 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ca59e827b92eb4468f8f0a249fad6c4f38798543730c53b414724190e5e31d8a" Jan 25 00:09:05 crc kubenswrapper[4985]: I0125 00:09:05.135821 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 25 00:09:05 crc kubenswrapper[4985]: I0125 00:09:05.144885 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"efb9d4d4-9c08-4eca-a43d-c7ee737f1788","Type":"ContainerDied","Data":"c9e8fd57e76642cc231e24bb317410ec85ac3b3196edecc0fd21d8ac45b33d30"} Jan 25 00:09:05 crc kubenswrapper[4985]: I0125 00:09:05.144913 4985 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c9e8fd57e76642cc231e24bb317410ec85ac3b3196edecc0fd21d8ac45b33d30" Jan 25 00:09:05 crc kubenswrapper[4985]: I0125 00:09:05.144940 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 25 00:09:05 crc kubenswrapper[4985]: I0125 00:09:05.841020 4985 patch_prober.go:28] interesting pod/machine-config-daemon-dddxc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 25 00:09:05 crc kubenswrapper[4985]: I0125 00:09:05.841087 4985 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 25 00:09:05 crc kubenswrapper[4985]: I0125 00:09:05.986841 4985 patch_prober.go:28] interesting pod/router-default-5444994796-5z29b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 25 00:09:05 crc kubenswrapper[4985]: [-]has-synced failed: reason withheld Jan 25 00:09:05 crc kubenswrapper[4985]: [+]process-running ok Jan 25 00:09:05 crc kubenswrapper[4985]: healthz check failed Jan 25 00:09:05 crc kubenswrapper[4985]: I0125 00:09:05.986898 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5z29b" podUID="2bfd38dc-27e5-4906-a593-ea58e49340b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 25 00:09:06 crc kubenswrapper[4985]: I0125 00:09:06.983590 4985 patch_prober.go:28] interesting pod/router-default-5444994796-5z29b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 25 00:09:06 crc kubenswrapper[4985]: [-]has-synced failed: reason withheld Jan 25 00:09:06 crc kubenswrapper[4985]: [+]process-running ok Jan 25 00:09:06 crc kubenswrapper[4985]: healthz check failed Jan 25 00:09:06 crc kubenswrapper[4985]: I0125 00:09:06.983647 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5z29b" podUID="2bfd38dc-27e5-4906-a593-ea58e49340b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 25 00:09:07 crc kubenswrapper[4985]: I0125 00:09:07.983752 4985 patch_prober.go:28] interesting pod/router-default-5444994796-5z29b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 25 00:09:07 crc kubenswrapper[4985]: [-]has-synced failed: reason withheld Jan 25 00:09:07 crc kubenswrapper[4985]: [+]process-running ok Jan 25 00:09:07 crc kubenswrapper[4985]: healthz check failed Jan 25 00:09:07 crc kubenswrapper[4985]: I0125 00:09:07.983817 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5z29b" podUID="2bfd38dc-27e5-4906-a593-ea58e49340b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 25 00:09:08 crc kubenswrapper[4985]: I0125 00:09:08.468078 4985 patch_prober.go:28] interesting pod/downloads-7954f5f757-54sg5 container/download-server namespace/openshift-console: Readiness probe status=failure output="Get 
\"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused" start-of-body= Jan 25 00:09:08 crc kubenswrapper[4985]: I0125 00:09:08.472017 4985 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-54sg5" podUID="ae20ca57-847f-4344-9718-aa179543b4ae" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused" Jan 25 00:09:08 crc kubenswrapper[4985]: I0125 00:09:08.472225 4985 patch_prober.go:28] interesting pod/downloads-7954f5f757-54sg5 container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused" start-of-body= Jan 25 00:09:08 crc kubenswrapper[4985]: I0125 00:09:08.472300 4985 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-54sg5" podUID="ae20ca57-847f-4344-9718-aa179543b4ae" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused" Jan 25 00:09:08 crc kubenswrapper[4985]: I0125 00:09:08.546612 4985 patch_prober.go:28] interesting pod/console-f9d7485db-54vvw container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.35:8443/health\": dial tcp 10.217.0.35:8443: connect: connection refused" start-of-body= Jan 25 00:09:08 crc kubenswrapper[4985]: I0125 00:09:08.546677 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-54vvw" podUID="5dd173ed-ab63-4006-9e39-2f4abf301a8e" containerName="console" probeResult="failure" output="Get \"https://10.217.0.35:8443/health\": dial tcp 10.217.0.35:8443: connect: connection refused" Jan 25 00:09:08 crc kubenswrapper[4985]: I0125 00:09:08.983499 4985 patch_prober.go:28] interesting pod/router-default-5444994796-5z29b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 25 00:09:08 crc kubenswrapper[4985]: [-]has-synced failed: reason withheld Jan 25 00:09:08 crc kubenswrapper[4985]: [+]process-running ok Jan 25 00:09:08 crc kubenswrapper[4985]: healthz check failed Jan 25 00:09:08 crc kubenswrapper[4985]: I0125 00:09:08.983572 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5z29b" podUID="2bfd38dc-27e5-4906-a593-ea58e49340b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 25 00:09:09 crc kubenswrapper[4985]: I0125 00:09:09.984686 4985 patch_prober.go:28] interesting pod/router-default-5444994796-5z29b container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 25 00:09:09 crc kubenswrapper[4985]: [-]has-synced failed: reason withheld Jan 25 00:09:09 crc kubenswrapper[4985]: [+]process-running ok Jan 25 00:09:09 crc kubenswrapper[4985]: healthz check failed Jan 25 00:09:09 crc kubenswrapper[4985]: I0125 00:09:09.984740 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-5z29b" podUID="2bfd38dc-27e5-4906-a593-ea58e49340b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 25 00:09:10 crc kubenswrapper[4985]: 
I0125 00:09:10.987620 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-5z29b" Jan 25 00:09:10 crc kubenswrapper[4985]: I0125 00:09:10.993236 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-5z29b" Jan 25 00:09:12 crc kubenswrapper[4985]: I0125 00:09:12.201357 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs\") pod \"network-metrics-daemon-cqtvp\" (UID: \"39723ce0-614f-4ada-9cc7-6efe79c7e51c\") " pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:09:12 crc kubenswrapper[4985]: I0125 00:09:12.263276 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/39723ce0-614f-4ada-9cc7-6efe79c7e51c-metrics-certs\") pod \"network-metrics-daemon-cqtvp\" (UID: \"39723ce0-614f-4ada-9cc7-6efe79c7e51c\") " pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:09:12 crc kubenswrapper[4985]: I0125 00:09:12.330649 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-cqtvp" Jan 25 00:09:13 crc kubenswrapper[4985]: I0125 00:09:13.294602 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mntqm"] Jan 25 00:09:13 crc kubenswrapper[4985]: I0125 00:09:13.295220 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" podUID="234cec4e-fc7e-4a34-b638-f1cc49fb2299" containerName="controller-manager" containerID="cri-o://0e8f2782261f9d349ab9b076ec7ba6e4cc1b1621871372bbe096931f7e0941bd" gracePeriod=30 Jan 25 00:09:13 crc kubenswrapper[4985]: I0125 00:09:13.296678 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc"] Jan 25 00:09:13 crc kubenswrapper[4985]: I0125 00:09:13.296938 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" podUID="c6ea2c96-caaa-4e9f-816a-fe2f63dedd65" containerName="route-controller-manager" containerID="cri-o://df3ea0dc4da8b50dcb1f77120ef92b796ef5e39cf1346b1ed775d8ae41c74e88" gracePeriod=30 Jan 25 00:09:15 crc kubenswrapper[4985]: I0125 00:09:15.312962 4985 generic.go:334] "Generic (PLEG): container finished" podID="c6ea2c96-caaa-4e9f-816a-fe2f63dedd65" containerID="df3ea0dc4da8b50dcb1f77120ef92b796ef5e39cf1346b1ed775d8ae41c74e88" exitCode=0 Jan 25 00:09:15 crc kubenswrapper[4985]: I0125 00:09:15.313041 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" event={"ID":"c6ea2c96-caaa-4e9f-816a-fe2f63dedd65","Type":"ContainerDied","Data":"df3ea0dc4da8b50dcb1f77120ef92b796ef5e39cf1346b1ed775d8ae41c74e88"} Jan 25 00:09:15 crc kubenswrapper[4985]: I0125 00:09:15.314853 4985 generic.go:334] "Generic (PLEG): container finished" podID="234cec4e-fc7e-4a34-b638-f1cc49fb2299" containerID="0e8f2782261f9d349ab9b076ec7ba6e4cc1b1621871372bbe096931f7e0941bd" exitCode=0 Jan 25 00:09:15 crc kubenswrapper[4985]: I0125 00:09:15.314895 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" 
event={"ID":"234cec4e-fc7e-4a34-b638-f1cc49fb2299","Type":"ContainerDied","Data":"0e8f2782261f9d349ab9b076ec7ba6e4cc1b1621871372bbe096931f7e0941bd"} Jan 25 00:09:16 crc kubenswrapper[4985]: I0125 00:09:16.908521 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:09:17 crc kubenswrapper[4985]: I0125 00:09:17.980905 4985 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-mntqm container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body= Jan 25 00:09:17 crc kubenswrapper[4985]: I0125 00:09:17.980974 4985 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" podUID="234cec4e-fc7e-4a34-b638-f1cc49fb2299" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" Jan 25 00:09:18 crc kubenswrapper[4985]: I0125 00:09:18.483240 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-54sg5" Jan 25 00:09:18 crc kubenswrapper[4985]: I0125 00:09:18.492707 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:09:18 crc kubenswrapper[4985]: I0125 00:09:18.497574 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-54vvw" Jan 25 00:09:19 crc kubenswrapper[4985]: I0125 00:09:19.790508 4985 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-92txc container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.19:8443/healthz\": dial tcp 10.217.0.19:8443: connect: connection refused" start-of-body= Jan 25 00:09:19 crc kubenswrapper[4985]: I0125 00:09:19.791102 4985 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" podUID="c6ea2c96-caaa-4e9f-816a-fe2f63dedd65" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.19:8443/healthz\": dial tcp 10.217.0.19:8443: connect: connection refused" Jan 25 00:09:24 crc kubenswrapper[4985]: I0125 00:09:24.392073 4985 generic.go:334] "Generic (PLEG): container finished" podID="cdc7acca-bf54-44d2-986b-10ecfb1a0abd" containerID="2715347b0cad6a0cfee5589aefe5e0abd8991ac05ac51be5ccf6a125fed1f876" exitCode=0 Jan 25 00:09:24 crc kubenswrapper[4985]: I0125 00:09:24.392132 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29488320-65m92" event={"ID":"cdc7acca-bf54-44d2-986b-10ecfb1a0abd","Type":"ContainerDied","Data":"2715347b0cad6a0cfee5589aefe5e0abd8991ac05ac51be5ccf6a125fed1f876"} Jan 25 00:09:28 crc kubenswrapper[4985]: I0125 00:09:28.980455 4985 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-mntqm container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 25 00:09:28 crc kubenswrapper[4985]: I0125 00:09:28.980872 4985 prober.go:107] "Probe failed" 
probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" podUID="234cec4e-fc7e-4a34-b638-f1cc49fb2299" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 25 00:09:29 crc kubenswrapper[4985]: I0125 00:09:29.791162 4985 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-92txc container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.19:8443/healthz\": dial tcp 10.217.0.19:8443: connect: connection refused" start-of-body= Jan 25 00:09:29 crc kubenswrapper[4985]: I0125 00:09:29.791247 4985 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" podUID="c6ea2c96-caaa-4e9f-816a-fe2f63dedd65" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.19:8443/healthz\": dial tcp 10.217.0.19:8443: connect: connection refused" Jan 25 00:09:29 crc kubenswrapper[4985]: I0125 00:09:29.828367 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-cjjtf" Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.093574 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.095151 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-pruner-29488320-65m92" Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.179618 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htvrt\" (UniqueName: \"kubernetes.io/projected/234cec4e-fc7e-4a34-b638-f1cc49fb2299-kube-api-access-htvrt\") pod \"234cec4e-fc7e-4a34-b638-f1cc49fb2299\" (UID: \"234cec4e-fc7e-4a34-b638-f1cc49fb2299\") " Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.179662 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/234cec4e-fc7e-4a34-b638-f1cc49fb2299-serving-cert\") pod \"234cec4e-fc7e-4a34-b638-f1cc49fb2299\" (UID: \"234cec4e-fc7e-4a34-b638-f1cc49fb2299\") " Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.179731 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/234cec4e-fc7e-4a34-b638-f1cc49fb2299-config\") pod \"234cec4e-fc7e-4a34-b638-f1cc49fb2299\" (UID: \"234cec4e-fc7e-4a34-b638-f1cc49fb2299\") " Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.179779 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mk4hg\" (UniqueName: \"kubernetes.io/projected/cdc7acca-bf54-44d2-986b-10ecfb1a0abd-kube-api-access-mk4hg\") pod \"cdc7acca-bf54-44d2-986b-10ecfb1a0abd\" (UID: \"cdc7acca-bf54-44d2-986b-10ecfb1a0abd\") " Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.179804 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/234cec4e-fc7e-4a34-b638-f1cc49fb2299-proxy-ca-bundles\") pod \"234cec4e-fc7e-4a34-b638-f1cc49fb2299\" (UID: 
\"234cec4e-fc7e-4a34-b638-f1cc49fb2299\") " Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.179854 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/234cec4e-fc7e-4a34-b638-f1cc49fb2299-client-ca\") pod \"234cec4e-fc7e-4a34-b638-f1cc49fb2299\" (UID: \"234cec4e-fc7e-4a34-b638-f1cc49fb2299\") " Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.179881 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/cdc7acca-bf54-44d2-986b-10ecfb1a0abd-serviceca\") pod \"cdc7acca-bf54-44d2-986b-10ecfb1a0abd\" (UID: \"cdc7acca-bf54-44d2-986b-10ecfb1a0abd\") " Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.180748 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/234cec4e-fc7e-4a34-b638-f1cc49fb2299-client-ca" (OuterVolumeSpecName: "client-ca") pod "234cec4e-fc7e-4a34-b638-f1cc49fb2299" (UID: "234cec4e-fc7e-4a34-b638-f1cc49fb2299"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.180878 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cdc7acca-bf54-44d2-986b-10ecfb1a0abd-serviceca" (OuterVolumeSpecName: "serviceca") pod "cdc7acca-bf54-44d2-986b-10ecfb1a0abd" (UID: "cdc7acca-bf54-44d2-986b-10ecfb1a0abd"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.180774 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/234cec4e-fc7e-4a34-b638-f1cc49fb2299-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "234cec4e-fc7e-4a34-b638-f1cc49fb2299" (UID: "234cec4e-fc7e-4a34-b638-f1cc49fb2299"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.180905 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/234cec4e-fc7e-4a34-b638-f1cc49fb2299-config" (OuterVolumeSpecName: "config") pod "234cec4e-fc7e-4a34-b638-f1cc49fb2299" (UID: "234cec4e-fc7e-4a34-b638-f1cc49fb2299"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.186154 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/234cec4e-fc7e-4a34-b638-f1cc49fb2299-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "234cec4e-fc7e-4a34-b638-f1cc49fb2299" (UID: "234cec4e-fc7e-4a34-b638-f1cc49fb2299"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.186277 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdc7acca-bf54-44d2-986b-10ecfb1a0abd-kube-api-access-mk4hg" (OuterVolumeSpecName: "kube-api-access-mk4hg") pod "cdc7acca-bf54-44d2-986b-10ecfb1a0abd" (UID: "cdc7acca-bf54-44d2-986b-10ecfb1a0abd"). InnerVolumeSpecName "kube-api-access-mk4hg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.192626 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/234cec4e-fc7e-4a34-b638-f1cc49fb2299-kube-api-access-htvrt" (OuterVolumeSpecName: "kube-api-access-htvrt") pod "234cec4e-fc7e-4a34-b638-f1cc49fb2299" (UID: "234cec4e-fc7e-4a34-b638-f1cc49fb2299"). InnerVolumeSpecName "kube-api-access-htvrt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.281086 4985 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/234cec4e-fc7e-4a34-b638-f1cc49fb2299-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.281141 4985 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/234cec4e-fc7e-4a34-b638-f1cc49fb2299-client-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.281152 4985 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/cdc7acca-bf54-44d2-986b-10ecfb1a0abd-serviceca\") on node \"crc\" DevicePath \"\"" Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.281162 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htvrt\" (UniqueName: \"kubernetes.io/projected/234cec4e-fc7e-4a34-b638-f1cc49fb2299-kube-api-access-htvrt\") on node \"crc\" DevicePath \"\"" Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.281173 4985 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/234cec4e-fc7e-4a34-b638-f1cc49fb2299-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.281182 4985 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/234cec4e-fc7e-4a34-b638-f1cc49fb2299-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.281193 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mk4hg\" (UniqueName: \"kubernetes.io/projected/cdc7acca-bf54-44d2-986b-10ecfb1a0abd-kube-api-access-mk4hg\") on node \"crc\" DevicePath \"\"" Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.432504 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" event={"ID":"234cec4e-fc7e-4a34-b638-f1cc49fb2299","Type":"ContainerDied","Data":"af8dac16149ad7a929e8951bc49acdbb20c65daf1c79556f97ef2a54c3a5ebcf"} Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.432568 4985 scope.go:117] "RemoveContainer" containerID="0e8f2782261f9d349ab9b076ec7ba6e4cc1b1621871372bbe096931f7e0941bd" Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.432603 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-mntqm" Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.436649 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-pruner-29488320-65m92" Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.436565 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-pruner-29488320-65m92" event={"ID":"cdc7acca-bf54-44d2-986b-10ecfb1a0abd","Type":"ContainerDied","Data":"f58ada542a2a644a947eea731c093ae4b6b74c8d5ee380329ce5e1eeaee4853f"} Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.439269 4985 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f58ada542a2a644a947eea731c093ae4b6b74c8d5ee380329ce5e1eeaee4853f" Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.465815 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mntqm"] Jan 25 00:09:31 crc kubenswrapper[4985]: I0125 00:09:31.467625 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-mntqm"] Jan 25 00:09:32 crc kubenswrapper[4985]: I0125 00:09:32.282222 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="234cec4e-fc7e-4a34-b638-f1cc49fb2299" path="/var/lib/kubelet/pods/234cec4e-fc7e-4a34-b638-f1cc49fb2299/volumes" Jan 25 00:09:32 crc kubenswrapper[4985]: I0125 00:09:32.866944 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-86dc859c9b-98klj"] Jan 25 00:09:32 crc kubenswrapper[4985]: E0125 00:09:32.867240 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="234cec4e-fc7e-4a34-b638-f1cc49fb2299" containerName="controller-manager" Jan 25 00:09:32 crc kubenswrapper[4985]: I0125 00:09:32.867259 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="234cec4e-fc7e-4a34-b638-f1cc49fb2299" containerName="controller-manager" Jan 25 00:09:32 crc kubenswrapper[4985]: E0125 00:09:32.867276 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ab4c861-cbe1-488d-888c-53221ba05a24" containerName="pruner" Jan 25 00:09:32 crc kubenswrapper[4985]: I0125 00:09:32.867285 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ab4c861-cbe1-488d-888c-53221ba05a24" containerName="pruner" Jan 25 00:09:32 crc kubenswrapper[4985]: E0125 00:09:32.867303 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="677a7eeb-960f-4771-bd2f-9fedef723ffd" containerName="collect-profiles" Jan 25 00:09:32 crc kubenswrapper[4985]: I0125 00:09:32.867311 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="677a7eeb-960f-4771-bd2f-9fedef723ffd" containerName="collect-profiles" Jan 25 00:09:32 crc kubenswrapper[4985]: E0125 00:09:32.867324 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efb9d4d4-9c08-4eca-a43d-c7ee737f1788" containerName="pruner" Jan 25 00:09:32 crc kubenswrapper[4985]: I0125 00:09:32.867332 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="efb9d4d4-9c08-4eca-a43d-c7ee737f1788" containerName="pruner" Jan 25 00:09:32 crc kubenswrapper[4985]: E0125 00:09:32.867342 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdc7acca-bf54-44d2-986b-10ecfb1a0abd" containerName="image-pruner" Jan 25 00:09:32 crc kubenswrapper[4985]: I0125 00:09:32.867349 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdc7acca-bf54-44d2-986b-10ecfb1a0abd" containerName="image-pruner" Jan 25 00:09:32 crc kubenswrapper[4985]: I0125 00:09:32.867492 4985 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="1ab4c861-cbe1-488d-888c-53221ba05a24" containerName="pruner" Jan 25 00:09:32 crc kubenswrapper[4985]: I0125 00:09:32.867508 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="677a7eeb-960f-4771-bd2f-9fedef723ffd" containerName="collect-profiles" Jan 25 00:09:32 crc kubenswrapper[4985]: I0125 00:09:32.867518 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdc7acca-bf54-44d2-986b-10ecfb1a0abd" containerName="image-pruner" Jan 25 00:09:32 crc kubenswrapper[4985]: I0125 00:09:32.867527 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="efb9d4d4-9c08-4eca-a43d-c7ee737f1788" containerName="pruner" Jan 25 00:09:32 crc kubenswrapper[4985]: I0125 00:09:32.867536 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="234cec4e-fc7e-4a34-b638-f1cc49fb2299" containerName="controller-manager" Jan 25 00:09:32 crc kubenswrapper[4985]: I0125 00:09:32.867969 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" Jan 25 00:09:32 crc kubenswrapper[4985]: I0125 00:09:32.870376 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 25 00:09:32 crc kubenswrapper[4985]: I0125 00:09:32.870635 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 25 00:09:32 crc kubenswrapper[4985]: I0125 00:09:32.870871 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 25 00:09:32 crc kubenswrapper[4985]: I0125 00:09:32.870931 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 25 00:09:32 crc kubenswrapper[4985]: I0125 00:09:32.871555 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 25 00:09:32 crc kubenswrapper[4985]: I0125 00:09:32.871766 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 25 00:09:32 crc kubenswrapper[4985]: I0125 00:09:32.880270 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-86dc859c9b-98klj"] Jan 25 00:09:32 crc kubenswrapper[4985]: I0125 00:09:32.880277 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 25 00:09:33 crc kubenswrapper[4985]: I0125 00:09:33.002658 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvks6\" (UniqueName: \"kubernetes.io/projected/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-kube-api-access-wvks6\") pod \"controller-manager-86dc859c9b-98klj\" (UID: \"61e8a524-c951-4dbd-9f9d-e3698af5e4b5\") " pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" Jan 25 00:09:33 crc kubenswrapper[4985]: I0125 00:09:33.002724 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-config\") pod \"controller-manager-86dc859c9b-98klj\" (UID: \"61e8a524-c951-4dbd-9f9d-e3698af5e4b5\") " pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" Jan 25 00:09:33 crc kubenswrapper[4985]: I0125 00:09:33.002768 4985 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-proxy-ca-bundles\") pod \"controller-manager-86dc859c9b-98klj\" (UID: \"61e8a524-c951-4dbd-9f9d-e3698af5e4b5\") " pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" Jan 25 00:09:33 crc kubenswrapper[4985]: I0125 00:09:33.002797 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-serving-cert\") pod \"controller-manager-86dc859c9b-98klj\" (UID: \"61e8a524-c951-4dbd-9f9d-e3698af5e4b5\") " pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" Jan 25 00:09:33 crc kubenswrapper[4985]: I0125 00:09:33.002823 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-client-ca\") pod \"controller-manager-86dc859c9b-98klj\" (UID: \"61e8a524-c951-4dbd-9f9d-e3698af5e4b5\") " pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" Jan 25 00:09:33 crc kubenswrapper[4985]: I0125 00:09:33.103783 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-config\") pod \"controller-manager-86dc859c9b-98klj\" (UID: \"61e8a524-c951-4dbd-9f9d-e3698af5e4b5\") " pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" Jan 25 00:09:33 crc kubenswrapper[4985]: I0125 00:09:33.103843 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-proxy-ca-bundles\") pod \"controller-manager-86dc859c9b-98klj\" (UID: \"61e8a524-c951-4dbd-9f9d-e3698af5e4b5\") " pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" Jan 25 00:09:33 crc kubenswrapper[4985]: I0125 00:09:33.103870 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-serving-cert\") pod \"controller-manager-86dc859c9b-98klj\" (UID: \"61e8a524-c951-4dbd-9f9d-e3698af5e4b5\") " pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" Jan 25 00:09:33 crc kubenswrapper[4985]: I0125 00:09:33.103899 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-client-ca\") pod \"controller-manager-86dc859c9b-98klj\" (UID: \"61e8a524-c951-4dbd-9f9d-e3698af5e4b5\") " pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" Jan 25 00:09:33 crc kubenswrapper[4985]: I0125 00:09:33.103934 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvks6\" (UniqueName: \"kubernetes.io/projected/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-kube-api-access-wvks6\") pod \"controller-manager-86dc859c9b-98klj\" (UID: \"61e8a524-c951-4dbd-9f9d-e3698af5e4b5\") " pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" Jan 25 00:09:33 crc kubenswrapper[4985]: I0125 00:09:33.105188 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-config\") pod 
\"controller-manager-86dc859c9b-98klj\" (UID: \"61e8a524-c951-4dbd-9f9d-e3698af5e4b5\") " pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" Jan 25 00:09:33 crc kubenswrapper[4985]: I0125 00:09:33.105689 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-client-ca\") pod \"controller-manager-86dc859c9b-98klj\" (UID: \"61e8a524-c951-4dbd-9f9d-e3698af5e4b5\") " pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" Jan 25 00:09:33 crc kubenswrapper[4985]: I0125 00:09:33.112884 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-serving-cert\") pod \"controller-manager-86dc859c9b-98klj\" (UID: \"61e8a524-c951-4dbd-9f9d-e3698af5e4b5\") " pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" Jan 25 00:09:33 crc kubenswrapper[4985]: I0125 00:09:33.131828 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvks6\" (UniqueName: \"kubernetes.io/projected/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-kube-api-access-wvks6\") pod \"controller-manager-86dc859c9b-98klj\" (UID: \"61e8a524-c951-4dbd-9f9d-e3698af5e4b5\") " pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" Jan 25 00:09:34 crc kubenswrapper[4985]: I0125 00:09:34.607634 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-proxy-ca-bundles\") pod \"controller-manager-86dc859c9b-98klj\" (UID: \"61e8a524-c951-4dbd-9f9d-e3698af5e4b5\") " pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" Jan 25 00:09:34 crc kubenswrapper[4985]: I0125 00:09:34.685620 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" Jan 25 00:09:34 crc kubenswrapper[4985]: E0125 00:09:34.918854 4985 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 25 00:09:34 crc kubenswrapper[4985]: E0125 00:09:34.919042 4985 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-z89zm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-zcf2n_openshift-marketplace(edab77a9-c9b1-44b8-8b21-275fc3bcdd81): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 25 00:09:34 crc kubenswrapper[4985]: E0125 00:09:34.920351 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-zcf2n" podUID="edab77a9-c9b1-44b8-8b21-275fc3bcdd81" Jan 25 00:09:35 crc kubenswrapper[4985]: I0125 00:09:35.836863 4985 patch_prober.go:28] interesting pod/machine-config-daemon-dddxc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 25 00:09:35 crc kubenswrapper[4985]: I0125 00:09:35.836914 4985 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 25 00:09:37 crc kubenswrapper[4985]: I0125 00:09:37.505369 4985 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 25 00:09:38 crc kubenswrapper[4985]: I0125 00:09:38.261943 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 25 00:09:38 crc kubenswrapper[4985]: I0125 00:09:38.263879 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 25 00:09:38 crc kubenswrapper[4985]: I0125 00:09:38.267206 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 25 00:09:38 crc kubenswrapper[4985]: I0125 00:09:38.267545 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 25 00:09:38 crc kubenswrapper[4985]: I0125 00:09:38.284939 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 25 00:09:38 crc kubenswrapper[4985]: I0125 00:09:38.380227 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4282f471-0e21-407f-af03-77319f7ceb9e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"4282f471-0e21-407f-af03-77319f7ceb9e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 25 00:09:38 crc kubenswrapper[4985]: I0125 00:09:38.380292 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4282f471-0e21-407f-af03-77319f7ceb9e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"4282f471-0e21-407f-af03-77319f7ceb9e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 25 00:09:38 crc kubenswrapper[4985]: I0125 00:09:38.482068 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4282f471-0e21-407f-af03-77319f7ceb9e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"4282f471-0e21-407f-af03-77319f7ceb9e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 25 00:09:38 crc kubenswrapper[4985]: I0125 00:09:38.482213 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4282f471-0e21-407f-af03-77319f7ceb9e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"4282f471-0e21-407f-af03-77319f7ceb9e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 25 00:09:38 crc kubenswrapper[4985]: I0125 00:09:38.482457 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4282f471-0e21-407f-af03-77319f7ceb9e-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"4282f471-0e21-407f-af03-77319f7ceb9e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 25 00:09:38 crc kubenswrapper[4985]: I0125 00:09:38.506896 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4282f471-0e21-407f-af03-77319f7ceb9e-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"4282f471-0e21-407f-af03-77319f7ceb9e\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 25 00:09:38 crc kubenswrapper[4985]: I0125 00:09:38.583249 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 25 00:09:40 crc kubenswrapper[4985]: I0125 00:09:40.790287 4985 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-92txc container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.19:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 25 00:09:40 crc kubenswrapper[4985]: I0125 00:09:40.790651 4985 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" podUID="c6ea2c96-caaa-4e9f-816a-fe2f63dedd65" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.19:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 25 00:09:40 crc kubenswrapper[4985]: E0125 00:09:40.841477 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-zcf2n" podUID="edab77a9-c9b1-44b8-8b21-275fc3bcdd81" Jan 25 00:09:41 crc kubenswrapper[4985]: E0125 00:09:41.233268 4985 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 25 00:09:41 crc kubenswrapper[4985]: E0125 00:09:41.233442 4985 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fr4hm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-pp7k8_openshift-marketplace(4daece71-11c1-4ef2-8cae-ff8e392d1abe): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context 
canceled" logger="UnhandledError" Jan 25 00:09:41 crc kubenswrapper[4985]: E0125 00:09:41.236464 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-pp7k8" podUID="4daece71-11c1-4ef2-8cae-ff8e392d1abe" Jan 25 00:09:42 crc kubenswrapper[4985]: E0125 00:09:42.493353 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-pp7k8" podUID="4daece71-11c1-4ef2-8cae-ff8e392d1abe" Jan 25 00:09:42 crc kubenswrapper[4985]: E0125 00:09:42.567979 4985 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 25 00:09:42 crc kubenswrapper[4985]: E0125 00:09:42.568180 4985 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-g8s22,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-czq57_openshift-marketplace(5fcaee3d-2838-4823-b0fd-f6285ebfe74c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 25 00:09:42 crc kubenswrapper[4985]: E0125 00:09:42.569267 4985 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 25 00:09:42 crc kubenswrapper[4985]: E0125 00:09:42.569355 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc 
error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-czq57" podUID="5fcaee3d-2838-4823-b0fd-f6285ebfe74c" Jan 25 00:09:42 crc kubenswrapper[4985]: E0125 00:09:42.569424 4985 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tm5cl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-77pvn_openshift-marketplace(badea0b3-377c-4171-931a-2fc2a9a07922): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 25 00:09:42 crc kubenswrapper[4985]: E0125 00:09:42.570850 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-77pvn" podUID="badea0b3-377c-4171-931a-2fc2a9a07922" Jan 25 00:09:42 crc kubenswrapper[4985]: E0125 00:09:42.626528 4985 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 25 00:09:42 crc kubenswrapper[4985]: E0125 00:09:42.626679 4985 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xzwjt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-nsrmt_openshift-marketplace(841f3be9-8a92-4e9e-af89-ddf60ffc736e): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 25 00:09:42 crc kubenswrapper[4985]: E0125 00:09:42.628908 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-nsrmt" podUID="841f3be9-8a92-4e9e-af89-ddf60ffc736e" Jan 25 00:09:42 crc kubenswrapper[4985]: I0125 00:09:42.662291 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 25 00:09:42 crc kubenswrapper[4985]: I0125 00:09:42.663105 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 25 00:09:42 crc kubenswrapper[4985]: I0125 00:09:42.666759 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 25 00:09:42 crc kubenswrapper[4985]: I0125 00:09:42.738946 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/83b165bc-18cd-43cb-9c88-18b18c31229d-kube-api-access\") pod \"installer-9-crc\" (UID: \"83b165bc-18cd-43cb-9c88-18b18c31229d\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 25 00:09:42 crc kubenswrapper[4985]: I0125 00:09:42.738993 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/83b165bc-18cd-43cb-9c88-18b18c31229d-kubelet-dir\") pod \"installer-9-crc\" (UID: \"83b165bc-18cd-43cb-9c88-18b18c31229d\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 25 00:09:42 crc kubenswrapper[4985]: I0125 00:09:42.739022 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/83b165bc-18cd-43cb-9c88-18b18c31229d-var-lock\") pod \"installer-9-crc\" (UID: \"83b165bc-18cd-43cb-9c88-18b18c31229d\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 25 00:09:42 crc kubenswrapper[4985]: I0125 00:09:42.840454 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/83b165bc-18cd-43cb-9c88-18b18c31229d-kubelet-dir\") pod \"installer-9-crc\" (UID: \"83b165bc-18cd-43cb-9c88-18b18c31229d\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 25 00:09:42 crc kubenswrapper[4985]: I0125 00:09:42.840520 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/83b165bc-18cd-43cb-9c88-18b18c31229d-var-lock\") pod \"installer-9-crc\" (UID: \"83b165bc-18cd-43cb-9c88-18b18c31229d\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 25 00:09:42 crc kubenswrapper[4985]: I0125 00:09:42.840583 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/83b165bc-18cd-43cb-9c88-18b18c31229d-kube-api-access\") pod \"installer-9-crc\" (UID: \"83b165bc-18cd-43cb-9c88-18b18c31229d\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 25 00:09:42 crc kubenswrapper[4985]: I0125 00:09:42.840936 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/83b165bc-18cd-43cb-9c88-18b18c31229d-kubelet-dir\") pod \"installer-9-crc\" (UID: \"83b165bc-18cd-43cb-9c88-18b18c31229d\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 25 00:09:42 crc kubenswrapper[4985]: I0125 00:09:42.840983 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/83b165bc-18cd-43cb-9c88-18b18c31229d-var-lock\") pod \"installer-9-crc\" (UID: \"83b165bc-18cd-43cb-9c88-18b18c31229d\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 25 00:09:42 crc kubenswrapper[4985]: I0125 00:09:42.858513 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/83b165bc-18cd-43cb-9c88-18b18c31229d-kube-api-access\") pod \"installer-9-crc\" (UID: 
\"83b165bc-18cd-43cb-9c88-18b18c31229d\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 25 00:09:42 crc kubenswrapper[4985]: I0125 00:09:42.980580 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 25 00:09:43 crc kubenswrapper[4985]: E0125 00:09:43.803526 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-77pvn" podUID="badea0b3-377c-4171-931a-2fc2a9a07922" Jan 25 00:09:43 crc kubenswrapper[4985]: E0125 00:09:43.803544 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-nsrmt" podUID="841f3be9-8a92-4e9e-af89-ddf60ffc736e" Jan 25 00:09:43 crc kubenswrapper[4985]: E0125 00:09:43.803544 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-czq57" podUID="5fcaee3d-2838-4823-b0fd-f6285ebfe74c" Jan 25 00:09:43 crc kubenswrapper[4985]: E0125 00:09:43.865440 4985 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 25 00:09:43 crc kubenswrapper[4985]: E0125 00:09:43.865779 4985 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nz6sd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-rl7bj_openshift-marketplace(7a26fe5e-9560-455a-a98e-6185e89ee607): ErrImagePull: rpc error: 
code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 25 00:09:43 crc kubenswrapper[4985]: E0125 00:09:43.867342 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-rl7bj" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" Jan 25 00:09:43 crc kubenswrapper[4985]: E0125 00:09:43.885663 4985 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 25 00:09:43 crc kubenswrapper[4985]: E0125 00:09:43.885816 4985 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vg4fk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-4q65s_openshift-marketplace(ebb7e09a-b77f-4c72-b892-177ebd17417c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 25 00:09:43 crc kubenswrapper[4985]: E0125 00:09:43.887165 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-4q65s" podUID="ebb7e09a-b77f-4c72-b892-177ebd17417c" Jan 25 00:09:43 crc kubenswrapper[4985]: E0125 00:09:43.910568 4985 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 25 00:09:43 crc kubenswrapper[4985]: E0125 00:09:43.910743 4985 kuberuntime_manager.go:1274] 
"Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cjkcr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-d5j6f_openshift-marketplace(5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 25 00:09:43 crc kubenswrapper[4985]: E0125 00:09:43.913162 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-d5j6f" podUID="5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1" Jan 25 00:09:43 crc kubenswrapper[4985]: I0125 00:09:43.948405 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" Jan 25 00:09:43 crc kubenswrapper[4985]: I0125 00:09:43.989342 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr"] Jan 25 00:09:43 crc kubenswrapper[4985]: E0125 00:09:43.989564 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6ea2c96-caaa-4e9f-816a-fe2f63dedd65" containerName="route-controller-manager" Jan 25 00:09:43 crc kubenswrapper[4985]: I0125 00:09:43.989575 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6ea2c96-caaa-4e9f-816a-fe2f63dedd65" containerName="route-controller-manager" Jan 25 00:09:43 crc kubenswrapper[4985]: I0125 00:09:43.989678 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6ea2c96-caaa-4e9f-816a-fe2f63dedd65" containerName="route-controller-manager" Jan 25 00:09:43 crc kubenswrapper[4985]: I0125 00:09:43.990016 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr" Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.040509 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr"] Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.058525 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6ea2c96-caaa-4e9f-816a-fe2f63dedd65-serving-cert\") pod \"c6ea2c96-caaa-4e9f-816a-fe2f63dedd65\" (UID: \"c6ea2c96-caaa-4e9f-816a-fe2f63dedd65\") " Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.058582 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c6ea2c96-caaa-4e9f-816a-fe2f63dedd65-client-ca\") pod \"c6ea2c96-caaa-4e9f-816a-fe2f63dedd65\" (UID: \"c6ea2c96-caaa-4e9f-816a-fe2f63dedd65\") " Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.058615 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6ea2c96-caaa-4e9f-816a-fe2f63dedd65-config\") pod \"c6ea2c96-caaa-4e9f-816a-fe2f63dedd65\" (UID: \"c6ea2c96-caaa-4e9f-816a-fe2f63dedd65\") " Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.058634 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8qbvk\" (UniqueName: \"kubernetes.io/projected/c6ea2c96-caaa-4e9f-816a-fe2f63dedd65-kube-api-access-8qbvk\") pod \"c6ea2c96-caaa-4e9f-816a-fe2f63dedd65\" (UID: \"c6ea2c96-caaa-4e9f-816a-fe2f63dedd65\") " Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.058787 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2209b252-4f5b-4f96-bb46-e006fe279367-serving-cert\") pod \"route-controller-manager-69df57b744-b6ddr\" (UID: \"2209b252-4f5b-4f96-bb46-e006fe279367\") " pod="openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr" Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.058832 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2209b252-4f5b-4f96-bb46-e006fe279367-config\") pod \"route-controller-manager-69df57b744-b6ddr\" (UID: \"2209b252-4f5b-4f96-bb46-e006fe279367\") " pod="openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr" Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.058871 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2209b252-4f5b-4f96-bb46-e006fe279367-client-ca\") pod \"route-controller-manager-69df57b744-b6ddr\" (UID: \"2209b252-4f5b-4f96-bb46-e006fe279367\") " pod="openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr" Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.058895 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4wxt\" (UniqueName: \"kubernetes.io/projected/2209b252-4f5b-4f96-bb46-e006fe279367-kube-api-access-c4wxt\") pod \"route-controller-manager-69df57b744-b6ddr\" (UID: \"2209b252-4f5b-4f96-bb46-e006fe279367\") " pod="openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr" Jan 25 00:09:44 crc 
kubenswrapper[4985]: I0125 00:09:44.060278 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6ea2c96-caaa-4e9f-816a-fe2f63dedd65-config" (OuterVolumeSpecName: "config") pod "c6ea2c96-caaa-4e9f-816a-fe2f63dedd65" (UID: "c6ea2c96-caaa-4e9f-816a-fe2f63dedd65"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.060479 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6ea2c96-caaa-4e9f-816a-fe2f63dedd65-client-ca" (OuterVolumeSpecName: "client-ca") pod "c6ea2c96-caaa-4e9f-816a-fe2f63dedd65" (UID: "c6ea2c96-caaa-4e9f-816a-fe2f63dedd65"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.067651 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c6ea2c96-caaa-4e9f-816a-fe2f63dedd65-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "c6ea2c96-caaa-4e9f-816a-fe2f63dedd65" (UID: "c6ea2c96-caaa-4e9f-816a-fe2f63dedd65"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.067818 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6ea2c96-caaa-4e9f-816a-fe2f63dedd65-kube-api-access-8qbvk" (OuterVolumeSpecName: "kube-api-access-8qbvk") pod "c6ea2c96-caaa-4e9f-816a-fe2f63dedd65" (UID: "c6ea2c96-caaa-4e9f-816a-fe2f63dedd65"). InnerVolumeSpecName "kube-api-access-8qbvk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.160388 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2209b252-4f5b-4f96-bb46-e006fe279367-client-ca\") pod \"route-controller-manager-69df57b744-b6ddr\" (UID: \"2209b252-4f5b-4f96-bb46-e006fe279367\") " pod="openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr" Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.160440 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4wxt\" (UniqueName: \"kubernetes.io/projected/2209b252-4f5b-4f96-bb46-e006fe279367-kube-api-access-c4wxt\") pod \"route-controller-manager-69df57b744-b6ddr\" (UID: \"2209b252-4f5b-4f96-bb46-e006fe279367\") " pod="openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr" Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.160491 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2209b252-4f5b-4f96-bb46-e006fe279367-serving-cert\") pod \"route-controller-manager-69df57b744-b6ddr\" (UID: \"2209b252-4f5b-4f96-bb46-e006fe279367\") " pod="openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr" Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.160521 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2209b252-4f5b-4f96-bb46-e006fe279367-config\") pod \"route-controller-manager-69df57b744-b6ddr\" (UID: \"2209b252-4f5b-4f96-bb46-e006fe279367\") " pod="openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr" Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.160552 4985 reconciler_common.go:293] 
"Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c6ea2c96-caaa-4e9f-816a-fe2f63dedd65-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.160563 4985 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/c6ea2c96-caaa-4e9f-816a-fe2f63dedd65-client-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.160572 4985 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6ea2c96-caaa-4e9f-816a-fe2f63dedd65-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.160583 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8qbvk\" (UniqueName: \"kubernetes.io/projected/c6ea2c96-caaa-4e9f-816a-fe2f63dedd65-kube-api-access-8qbvk\") on node \"crc\" DevicePath \"\"" Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.161635 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2209b252-4f5b-4f96-bb46-e006fe279367-config\") pod \"route-controller-manager-69df57b744-b6ddr\" (UID: \"2209b252-4f5b-4f96-bb46-e006fe279367\") " pod="openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr" Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.162163 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2209b252-4f5b-4f96-bb46-e006fe279367-client-ca\") pod \"route-controller-manager-69df57b744-b6ddr\" (UID: \"2209b252-4f5b-4f96-bb46-e006fe279367\") " pod="openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr" Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.169165 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2209b252-4f5b-4f96-bb46-e006fe279367-serving-cert\") pod \"route-controller-manager-69df57b744-b6ddr\" (UID: \"2209b252-4f5b-4f96-bb46-e006fe279367\") " pod="openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr" Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.179330 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4wxt\" (UniqueName: \"kubernetes.io/projected/2209b252-4f5b-4f96-bb46-e006fe279367-kube-api-access-c4wxt\") pod \"route-controller-manager-69df57b744-b6ddr\" (UID: \"2209b252-4f5b-4f96-bb46-e006fe279367\") " pod="openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr" Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.267549 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-cqtvp"] Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.314413 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr" Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.339941 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.360977 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-86dc859c9b-98klj"] Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.371864 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 25 00:09:44 crc kubenswrapper[4985]: W0125 00:09:44.373948 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod83b165bc_18cd_43cb_9c88_18b18c31229d.slice/crio-0a1f3b6e806b2371a41030ef225c39063b180ad3e8018cbdeb9716d3efa2546f WatchSource:0}: Error finding container 0a1f3b6e806b2371a41030ef225c39063b180ad3e8018cbdeb9716d3efa2546f: Status 404 returned error can't find the container with id 0a1f3b6e806b2371a41030ef225c39063b180ad3e8018cbdeb9716d3efa2546f Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.512539 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr"] Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.521201 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" event={"ID":"c6ea2c96-caaa-4e9f-816a-fe2f63dedd65","Type":"ContainerDied","Data":"a6a6f427c4fd072105e15777859e1f9973ae0c4a8fc53935c105c643c50aa51e"} Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.521265 4985 scope.go:117] "RemoveContainer" containerID="df3ea0dc4da8b50dcb1f77120ef92b796ef5e39cf1346b1ed775d8ae41c74e88" Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.521402 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc" Jan 25 00:09:44 crc kubenswrapper[4985]: W0125 00:09:44.523440 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2209b252_4f5b_4f96_bb46_e006fe279367.slice/crio-5743fc852188b8b93be36f39fa9f75923970c70860110e424801cdcfed287dcd WatchSource:0}: Error finding container 5743fc852188b8b93be36f39fa9f75923970c70860110e424801cdcfed287dcd: Status 404 returned error can't find the container with id 5743fc852188b8b93be36f39fa9f75923970c70860110e424801cdcfed287dcd Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.526935 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" event={"ID":"61e8a524-c951-4dbd-9f9d-e3698af5e4b5","Type":"ContainerStarted","Data":"a011e2baa8f7cdadda5f6cfe3a7cce03ff37b9bbcaafb94c237a0956c0c8c726"} Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.529985 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"83b165bc-18cd-43cb-9c88-18b18c31229d","Type":"ContainerStarted","Data":"0a1f3b6e806b2371a41030ef225c39063b180ad3e8018cbdeb9716d3efa2546f"} Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.531309 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-cqtvp" event={"ID":"39723ce0-614f-4ada-9cc7-6efe79c7e51c","Type":"ContainerStarted","Data":"5fc2fe4f6eb4acac83fb1d7842c12c6af9da9e45554c57db1ec7c69db327f3e9"} Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.534460 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"4282f471-0e21-407f-af03-77319f7ceb9e","Type":"ContainerStarted","Data":"4dbc953d26145b6fdddd849b2811577b4d3871d2942ee77c2e07dd4eeea5eae6"} Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.547027 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc"] Jan 25 00:09:44 crc kubenswrapper[4985]: E0125 00:09:44.550441 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-d5j6f" podUID="5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1" Jan 25 00:09:44 crc kubenswrapper[4985]: E0125 00:09:44.550599 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-rl7bj" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" Jan 25 00:09:44 crc kubenswrapper[4985]: I0125 00:09:44.550864 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-92txc"] Jan 25 00:09:44 crc kubenswrapper[4985]: E0125 00:09:44.552257 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-4q65s" podUID="ebb7e09a-b77f-4c72-b892-177ebd17417c" Jan 25 00:09:45 crc 
kubenswrapper[4985]: I0125 00:09:45.544803 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" event={"ID":"61e8a524-c951-4dbd-9f9d-e3698af5e4b5","Type":"ContainerStarted","Data":"0019a234f48255509cda5a41f6e49130627ec28d5950f97e2f7870bc29721dba"} Jan 25 00:09:45 crc kubenswrapper[4985]: I0125 00:09:45.545486 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" Jan 25 00:09:45 crc kubenswrapper[4985]: I0125 00:09:45.547211 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr" event={"ID":"2209b252-4f5b-4f96-bb46-e006fe279367","Type":"ContainerStarted","Data":"aca9715646fa00173b750e44d1e5ec1b0f28091aeec46e1f644da3b5b9072661"} Jan 25 00:09:45 crc kubenswrapper[4985]: I0125 00:09:45.547299 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr" event={"ID":"2209b252-4f5b-4f96-bb46-e006fe279367","Type":"ContainerStarted","Data":"5743fc852188b8b93be36f39fa9f75923970c70860110e424801cdcfed287dcd"} Jan 25 00:09:45 crc kubenswrapper[4985]: I0125 00:09:45.547333 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr" Jan 25 00:09:45 crc kubenswrapper[4985]: I0125 00:09:45.553239 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" Jan 25 00:09:45 crc kubenswrapper[4985]: I0125 00:09:45.554764 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"83b165bc-18cd-43cb-9c88-18b18c31229d","Type":"ContainerStarted","Data":"ae710686592490a7bd47cc94649616ce5041440e049e28416fdf9ded696d9533"} Jan 25 00:09:45 crc kubenswrapper[4985]: I0125 00:09:45.554982 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr" Jan 25 00:09:45 crc kubenswrapper[4985]: I0125 00:09:45.557690 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-cqtvp" event={"ID":"39723ce0-614f-4ada-9cc7-6efe79c7e51c","Type":"ContainerStarted","Data":"90a4a3e6ed6d43f8dfac8293e9989bb4900026ea646ee16de5c9f90b374335b9"} Jan 25 00:09:45 crc kubenswrapper[4985]: I0125 00:09:45.557726 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-cqtvp" event={"ID":"39723ce0-614f-4ada-9cc7-6efe79c7e51c","Type":"ContainerStarted","Data":"a9671ac0243b9cc6d725ff3e7ae7006216344d745285c3ebd0400a54cdef8c43"} Jan 25 00:09:45 crc kubenswrapper[4985]: I0125 00:09:45.566405 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" podStartSLOduration=13.566381387 podStartE2EDuration="13.566381387s" podCreationTimestamp="2026-01-25 00:09:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:09:45.560261916 +0000 UTC m=+195.592198219" watchObservedRunningTime="2026-01-25 00:09:45.566381387 +0000 UTC m=+195.598317670" Jan 25 00:09:45 crc kubenswrapper[4985]: I0125 00:09:45.569892 4985 generic.go:334] "Generic (PLEG): container finished" 
podID="4282f471-0e21-407f-af03-77319f7ceb9e" containerID="64f0053ab6f06f0d7221a3b495d2eb7af6988bb02d246805b442d0b967894105" exitCode=0 Jan 25 00:09:45 crc kubenswrapper[4985]: I0125 00:09:45.569953 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"4282f471-0e21-407f-af03-77319f7ceb9e","Type":"ContainerDied","Data":"64f0053ab6f06f0d7221a3b495d2eb7af6988bb02d246805b442d0b967894105"} Jan 25 00:09:45 crc kubenswrapper[4985]: I0125 00:09:45.586743 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr" podStartSLOduration=13.586720364 podStartE2EDuration="13.586720364s" podCreationTimestamp="2026-01-25 00:09:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:09:45.582179784 +0000 UTC m=+195.614116097" watchObservedRunningTime="2026-01-25 00:09:45.586720364 +0000 UTC m=+195.618656677" Jan 25 00:09:45 crc kubenswrapper[4985]: I0125 00:09:45.619904 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-cqtvp" podStartSLOduration=176.619880407 podStartE2EDuration="2m56.619880407s" podCreationTimestamp="2026-01-25 00:06:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:09:45.61201379 +0000 UTC m=+195.643950073" watchObservedRunningTime="2026-01-25 00:09:45.619880407 +0000 UTC m=+195.651816690" Jan 25 00:09:45 crc kubenswrapper[4985]: I0125 00:09:45.635676 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=3.635655103 podStartE2EDuration="3.635655103s" podCreationTimestamp="2026-01-25 00:09:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:09:45.632583692 +0000 UTC m=+195.664519965" watchObservedRunningTime="2026-01-25 00:09:45.635655103 +0000 UTC m=+195.667591396" Jan 25 00:09:46 crc kubenswrapper[4985]: I0125 00:09:46.287225 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6ea2c96-caaa-4e9f-816a-fe2f63dedd65" path="/var/lib/kubelet/pods/c6ea2c96-caaa-4e9f-816a-fe2f63dedd65/volumes" Jan 25 00:09:46 crc kubenswrapper[4985]: I0125 00:09:46.845535 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 25 00:09:46 crc kubenswrapper[4985]: I0125 00:09:46.902608 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4282f471-0e21-407f-af03-77319f7ceb9e-kube-api-access\") pod \"4282f471-0e21-407f-af03-77319f7ceb9e\" (UID: \"4282f471-0e21-407f-af03-77319f7ceb9e\") " Jan 25 00:09:46 crc kubenswrapper[4985]: I0125 00:09:46.902756 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4282f471-0e21-407f-af03-77319f7ceb9e-kubelet-dir\") pod \"4282f471-0e21-407f-af03-77319f7ceb9e\" (UID: \"4282f471-0e21-407f-af03-77319f7ceb9e\") " Jan 25 00:09:46 crc kubenswrapper[4985]: I0125 00:09:46.902881 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4282f471-0e21-407f-af03-77319f7ceb9e-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "4282f471-0e21-407f-af03-77319f7ceb9e" (UID: "4282f471-0e21-407f-af03-77319f7ceb9e"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:09:46 crc kubenswrapper[4985]: I0125 00:09:46.903171 4985 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4282f471-0e21-407f-af03-77319f7ceb9e-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 25 00:09:46 crc kubenswrapper[4985]: I0125 00:09:46.911258 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4282f471-0e21-407f-af03-77319f7ceb9e-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "4282f471-0e21-407f-af03-77319f7ceb9e" (UID: "4282f471-0e21-407f-af03-77319f7ceb9e"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:09:47 crc kubenswrapper[4985]: I0125 00:09:47.003963 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4282f471-0e21-407f-af03-77319f7ceb9e-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 25 00:09:47 crc kubenswrapper[4985]: I0125 00:09:47.583004 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"4282f471-0e21-407f-af03-77319f7ceb9e","Type":"ContainerDied","Data":"4dbc953d26145b6fdddd849b2811577b4d3871d2942ee77c2e07dd4eeea5eae6"} Jan 25 00:09:47 crc kubenswrapper[4985]: I0125 00:09:47.583291 4985 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4dbc953d26145b6fdddd849b2811577b4d3871d2942ee77c2e07dd4eeea5eae6" Jan 25 00:09:47 crc kubenswrapper[4985]: I0125 00:09:47.583056 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 25 00:09:55 crc kubenswrapper[4985]: I0125 00:09:55.627158 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-77pvn" event={"ID":"badea0b3-377c-4171-931a-2fc2a9a07922","Type":"ContainerStarted","Data":"84004ec1b37da1c935e3555456cd483d2ae42526ac01c3263107eff0e75337a6"} Jan 25 00:09:56 crc kubenswrapper[4985]: I0125 00:09:56.635060 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zcf2n" event={"ID":"edab77a9-c9b1-44b8-8b21-275fc3bcdd81","Type":"ContainerStarted","Data":"4711ba941777bad3b9994f4f906344823beda79d0a0920dfcdc19877d2413fa5"} Jan 25 00:09:56 crc kubenswrapper[4985]: I0125 00:09:56.638360 4985 generic.go:334] "Generic (PLEG): container finished" podID="badea0b3-377c-4171-931a-2fc2a9a07922" containerID="84004ec1b37da1c935e3555456cd483d2ae42526ac01c3263107eff0e75337a6" exitCode=0 Jan 25 00:09:56 crc kubenswrapper[4985]: I0125 00:09:56.638439 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-77pvn" event={"ID":"badea0b3-377c-4171-931a-2fc2a9a07922","Type":"ContainerDied","Data":"84004ec1b37da1c935e3555456cd483d2ae42526ac01c3263107eff0e75337a6"} Jan 25 00:09:57 crc kubenswrapper[4985]: I0125 00:09:57.646671 4985 generic.go:334] "Generic (PLEG): container finished" podID="edab77a9-c9b1-44b8-8b21-275fc3bcdd81" containerID="4711ba941777bad3b9994f4f906344823beda79d0a0920dfcdc19877d2413fa5" exitCode=0 Jan 25 00:09:57 crc kubenswrapper[4985]: I0125 00:09:57.646797 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zcf2n" event={"ID":"edab77a9-c9b1-44b8-8b21-275fc3bcdd81","Type":"ContainerDied","Data":"4711ba941777bad3b9994f4f906344823beda79d0a0920dfcdc19877d2413fa5"} Jan 25 00:09:57 crc kubenswrapper[4985]: I0125 00:09:57.649609 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-77pvn" event={"ID":"badea0b3-377c-4171-931a-2fc2a9a07922","Type":"ContainerStarted","Data":"b359eb48fb7dc09841b9392d88c29132e05bc01733ba5e08b66c40af5d12a0e8"} Jan 25 00:09:57 crc kubenswrapper[4985]: I0125 00:09:57.652211 4985 generic.go:334] "Generic (PLEG): container finished" podID="4daece71-11c1-4ef2-8cae-ff8e392d1abe" containerID="61e48fefeef57ef9c38609adc0f05786b396c72379746a9fafcad011d9558024" exitCode=0 Jan 25 00:09:57 crc kubenswrapper[4985]: I0125 00:09:57.652252 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pp7k8" event={"ID":"4daece71-11c1-4ef2-8cae-ff8e392d1abe","Type":"ContainerDied","Data":"61e48fefeef57ef9c38609adc0f05786b396c72379746a9fafcad011d9558024"} Jan 25 00:09:57 crc kubenswrapper[4985]: I0125 00:09:57.713394 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-77pvn" podStartSLOduration=3.392573529 podStartE2EDuration="1m2.713357401s" podCreationTimestamp="2026-01-25 00:08:55 +0000 UTC" firstStartedPulling="2026-01-25 00:08:57.806634865 +0000 UTC m=+147.838571138" lastFinishedPulling="2026-01-25 00:09:57.127418737 +0000 UTC m=+207.159355010" observedRunningTime="2026-01-25 00:09:57.704308866 +0000 UTC m=+207.736245139" watchObservedRunningTime="2026-01-25 00:09:57.713357401 +0000 UTC m=+207.745293694" Jan 25 00:09:58 crc kubenswrapper[4985]: I0125 00:09:58.658415 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-d5j6f" event={"ID":"5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1","Type":"ContainerStarted","Data":"313e719b5428f6619aab944b8cf6ad6bb41a595f6eba4493a0eaf6914a2953ba"} Jan 25 00:09:59 crc kubenswrapper[4985]: I0125 00:09:59.668673 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zcf2n" event={"ID":"edab77a9-c9b1-44b8-8b21-275fc3bcdd81","Type":"ContainerStarted","Data":"65e7cec1864eb3d61de7a874972835336daf70b8f55c57b8eb797af178107cbb"} Jan 25 00:09:59 crc kubenswrapper[4985]: I0125 00:09:59.673184 4985 generic.go:334] "Generic (PLEG): container finished" podID="5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1" containerID="313e719b5428f6619aab944b8cf6ad6bb41a595f6eba4493a0eaf6914a2953ba" exitCode=0 Jan 25 00:09:59 crc kubenswrapper[4985]: I0125 00:09:59.673226 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d5j6f" event={"ID":"5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1","Type":"ContainerDied","Data":"313e719b5428f6619aab944b8cf6ad6bb41a595f6eba4493a0eaf6914a2953ba"} Jan 25 00:09:59 crc kubenswrapper[4985]: I0125 00:09:59.675252 4985 generic.go:334] "Generic (PLEG): container finished" podID="841f3be9-8a92-4e9e-af89-ddf60ffc736e" containerID="f18f6324b9fc6b75f36b4a86747a3a102639f44fd4740197f0d859d0c0e0fe01" exitCode=0 Jan 25 00:09:59 crc kubenswrapper[4985]: I0125 00:09:59.675287 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nsrmt" event={"ID":"841f3be9-8a92-4e9e-af89-ddf60ffc736e","Type":"ContainerDied","Data":"f18f6324b9fc6b75f36b4a86747a3a102639f44fd4740197f0d859d0c0e0fe01"} Jan 25 00:09:59 crc kubenswrapper[4985]: I0125 00:09:59.678970 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pp7k8" event={"ID":"4daece71-11c1-4ef2-8cae-ff8e392d1abe","Type":"ContainerStarted","Data":"1ad1dce0aa1ce2c127a321405ccf6ad0282a9ba0f5659dd3abc4e1c44a527b41"} Jan 25 00:09:59 crc kubenswrapper[4985]: I0125 00:09:59.687250 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-zcf2n" podStartSLOduration=3.219785692 podStartE2EDuration="1m1.687237744s" podCreationTimestamp="2026-01-25 00:08:58 +0000 UTC" firstStartedPulling="2026-01-25 00:08:59.920989882 +0000 UTC m=+149.952926155" lastFinishedPulling="2026-01-25 00:09:58.388441934 +0000 UTC m=+208.420378207" observedRunningTime="2026-01-25 00:09:59.686421183 +0000 UTC m=+209.718357456" watchObservedRunningTime="2026-01-25 00:09:59.687237744 +0000 UTC m=+209.719174017" Jan 25 00:09:59 crc kubenswrapper[4985]: I0125 00:09:59.692316 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-czq57" event={"ID":"5fcaee3d-2838-4823-b0fd-f6285ebfe74c","Type":"ContainerStarted","Data":"951f0125e25cce799343a1415fa867db97019ba36ca400387ab02c048454a87f"} Jan 25 00:09:59 crc kubenswrapper[4985]: I0125 00:09:59.696389 4985 generic.go:334] "Generic (PLEG): container finished" podID="ebb7e09a-b77f-4c72-b892-177ebd17417c" containerID="f657e03c8e04d849c1f9e17c73065dde579d3fc52d03ceb863559c4258cd847f" exitCode=0 Jan 25 00:09:59 crc kubenswrapper[4985]: I0125 00:09:59.696425 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4q65s" event={"ID":"ebb7e09a-b77f-4c72-b892-177ebd17417c","Type":"ContainerDied","Data":"f657e03c8e04d849c1f9e17c73065dde579d3fc52d03ceb863559c4258cd847f"} Jan 25 00:09:59 crc 
kubenswrapper[4985]: I0125 00:09:59.753576 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-pp7k8" podStartSLOduration=3.999407377 podStartE2EDuration="1m4.75355903s" podCreationTimestamp="2026-01-25 00:08:55 +0000 UTC" firstStartedPulling="2026-01-25 00:08:57.721675046 +0000 UTC m=+147.753611319" lastFinishedPulling="2026-01-25 00:09:58.475826699 +0000 UTC m=+208.507762972" observedRunningTime="2026-01-25 00:09:59.752948063 +0000 UTC m=+209.784884366" watchObservedRunningTime="2026-01-25 00:09:59.75355903 +0000 UTC m=+209.785495313" Jan 25 00:10:00 crc kubenswrapper[4985]: I0125 00:10:00.705456 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nsrmt" event={"ID":"841f3be9-8a92-4e9e-af89-ddf60ffc736e","Type":"ContainerStarted","Data":"e4e1beafea23ac2d99c7a2b826ee6aa37e844c6cc3a39c2fcd0f660e3b52bd13"} Jan 25 00:10:00 crc kubenswrapper[4985]: I0125 00:10:00.707658 4985 generic.go:334] "Generic (PLEG): container finished" podID="5fcaee3d-2838-4823-b0fd-f6285ebfe74c" containerID="951f0125e25cce799343a1415fa867db97019ba36ca400387ab02c048454a87f" exitCode=0 Jan 25 00:10:00 crc kubenswrapper[4985]: I0125 00:10:00.707681 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-czq57" event={"ID":"5fcaee3d-2838-4823-b0fd-f6285ebfe74c","Type":"ContainerDied","Data":"951f0125e25cce799343a1415fa867db97019ba36ca400387ab02c048454a87f"} Jan 25 00:10:00 crc kubenswrapper[4985]: I0125 00:10:00.725710 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nsrmt" podStartSLOduration=3.451421778 podStartE2EDuration="1m5.725698361s" podCreationTimestamp="2026-01-25 00:08:55 +0000 UTC" firstStartedPulling="2026-01-25 00:08:57.829892998 +0000 UTC m=+147.861829261" lastFinishedPulling="2026-01-25 00:10:00.104169571 +0000 UTC m=+210.136105844" observedRunningTime="2026-01-25 00:10:00.721617974 +0000 UTC m=+210.753554267" watchObservedRunningTime="2026-01-25 00:10:00.725698361 +0000 UTC m=+210.757634654" Jan 25 00:10:01 crc kubenswrapper[4985]: I0125 00:10:01.741464 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-czq57" event={"ID":"5fcaee3d-2838-4823-b0fd-f6285ebfe74c","Type":"ContainerStarted","Data":"d69c039970c23266548e9880303712b2e3048a641c8c3bea1406f992830887d1"} Jan 25 00:10:01 crc kubenswrapper[4985]: I0125 00:10:01.743403 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4q65s" event={"ID":"ebb7e09a-b77f-4c72-b892-177ebd17417c","Type":"ContainerStarted","Data":"09563aeacf0b86864daef3fb4fda098ba86247eaf80d820b83ce0a3b4b8aceda"} Jan 25 00:10:01 crc kubenswrapper[4985]: I0125 00:10:01.759011 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-czq57" podStartSLOduration=4.083154988 podStartE2EDuration="1m6.758996543s" podCreationTimestamp="2026-01-25 00:08:55 +0000 UTC" firstStartedPulling="2026-01-25 00:08:57.72788156 +0000 UTC m=+147.759817833" lastFinishedPulling="2026-01-25 00:10:00.403723115 +0000 UTC m=+210.435659388" observedRunningTime="2026-01-25 00:10:01.756193431 +0000 UTC m=+211.788129714" watchObservedRunningTime="2026-01-25 00:10:01.758996543 +0000 UTC m=+211.790932816" Jan 25 00:10:01 crc kubenswrapper[4985]: I0125 00:10:01.777071 4985 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openshift-marketplace/redhat-marketplace-4q65s" podStartSLOduration=4.289363752 podStartE2EDuration="1m4.777055643s" podCreationTimestamp="2026-01-25 00:08:57 +0000 UTC" firstStartedPulling="2026-01-25 00:08:59.949851713 +0000 UTC m=+149.981787986" lastFinishedPulling="2026-01-25 00:10:00.437543584 +0000 UTC m=+210.469479877" observedRunningTime="2026-01-25 00:10:01.773514781 +0000 UTC m=+211.805451074" watchObservedRunningTime="2026-01-25 00:10:01.777055643 +0000 UTC m=+211.808991916" Jan 25 00:10:02 crc kubenswrapper[4985]: I0125 00:10:02.752412 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d5j6f" event={"ID":"5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1","Type":"ContainerStarted","Data":"3f82fba9e360d9bbd38166d069d890f2a6e5e0f634af1d748401519f53491f1a"} Jan 25 00:10:03 crc kubenswrapper[4985]: I0125 00:10:03.790479 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-d5j6f" podStartSLOduration=5.4183883 podStartE2EDuration="1m5.790464724s" podCreationTimestamp="2026-01-25 00:08:58 +0000 UTC" firstStartedPulling="2026-01-25 00:09:00.975406193 +0000 UTC m=+151.007342466" lastFinishedPulling="2026-01-25 00:10:01.347482617 +0000 UTC m=+211.379418890" observedRunningTime="2026-01-25 00:10:03.78955197 +0000 UTC m=+213.821488243" watchObservedRunningTime="2026-01-25 00:10:03.790464724 +0000 UTC m=+213.822400997" Jan 25 00:10:05 crc kubenswrapper[4985]: I0125 00:10:05.835856 4985 patch_prober.go:28] interesting pod/machine-config-daemon-dddxc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 25 00:10:05 crc kubenswrapper[4985]: I0125 00:10:05.836312 4985 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 25 00:10:05 crc kubenswrapper[4985]: I0125 00:10:05.836366 4985 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" Jan 25 00:10:05 crc kubenswrapper[4985]: I0125 00:10:05.836996 4985 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585"} pod="openshift-machine-config-operator/machine-config-daemon-dddxc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 25 00:10:05 crc kubenswrapper[4985]: I0125 00:10:05.837140 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" containerID="cri-o://03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585" gracePeriod=600 Jan 25 00:10:05 crc kubenswrapper[4985]: I0125 00:10:05.861558 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nsrmt" Jan 25 00:10:05 crc kubenswrapper[4985]: I0125 00:10:05.861644 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/certified-operators-nsrmt" Jan 25 00:10:06 crc kubenswrapper[4985]: I0125 00:10:06.016437 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-77pvn" Jan 25 00:10:06 crc kubenswrapper[4985]: I0125 00:10:06.016491 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-77pvn" Jan 25 00:10:06 crc kubenswrapper[4985]: I0125 00:10:06.090420 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-pp7k8" Jan 25 00:10:06 crc kubenswrapper[4985]: I0125 00:10:06.090952 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-pp7k8" Jan 25 00:10:06 crc kubenswrapper[4985]: I0125 00:10:06.261347 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-77pvn" Jan 25 00:10:06 crc kubenswrapper[4985]: I0125 00:10:06.261626 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-pp7k8" Jan 25 00:10:06 crc kubenswrapper[4985]: I0125 00:10:06.262341 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nsrmt" Jan 25 00:10:06 crc kubenswrapper[4985]: I0125 00:10:06.289324 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-czq57" Jan 25 00:10:06 crc kubenswrapper[4985]: I0125 00:10:06.289380 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-czq57" Jan 25 00:10:06 crc kubenswrapper[4985]: I0125 00:10:06.330403 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-czq57" Jan 25 00:10:06 crc kubenswrapper[4985]: I0125 00:10:06.795329 4985 generic.go:334] "Generic (PLEG): container finished" podID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerID="03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585" exitCode=0 Jan 25 00:10:06 crc kubenswrapper[4985]: I0125 00:10:06.795415 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" event={"ID":"5fa83abe-5c61-40a5-bf77-d8f929bdda78","Type":"ContainerDied","Data":"03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585"} Jan 25 00:10:06 crc kubenswrapper[4985]: I0125 00:10:06.858571 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-77pvn" Jan 25 00:10:06 crc kubenswrapper[4985]: I0125 00:10:06.861720 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-czq57" Jan 25 00:10:06 crc kubenswrapper[4985]: I0125 00:10:06.862562 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-pp7k8" Jan 25 00:10:07 crc kubenswrapper[4985]: I0125 00:10:07.174458 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nsrmt" Jan 25 00:10:08 crc kubenswrapper[4985]: I0125 00:10:08.262931 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4q65s" Jan 25 00:10:08 crc 
kubenswrapper[4985]: I0125 00:10:08.263324 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4q65s" Jan 25 00:10:08 crc kubenswrapper[4985]: I0125 00:10:08.302811 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4q65s" Jan 25 00:10:08 crc kubenswrapper[4985]: I0125 00:10:08.509327 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pp7k8"] Jan 25 00:10:08 crc kubenswrapper[4985]: I0125 00:10:08.718251 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-czq57"] Jan 25 00:10:08 crc kubenswrapper[4985]: I0125 00:10:08.781957 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-zcf2n" Jan 25 00:10:08 crc kubenswrapper[4985]: I0125 00:10:08.782111 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-zcf2n" Jan 25 00:10:08 crc kubenswrapper[4985]: I0125 00:10:08.848750 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-czq57" podUID="5fcaee3d-2838-4823-b0fd-f6285ebfe74c" containerName="registry-server" containerID="cri-o://d69c039970c23266548e9880303712b2e3048a641c8c3bea1406f992830887d1" gracePeriod=2 Jan 25 00:10:08 crc kubenswrapper[4985]: I0125 00:10:08.885396 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-zcf2n" Jan 25 00:10:08 crc kubenswrapper[4985]: I0125 00:10:08.894232 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4q65s" Jan 25 00:10:09 crc kubenswrapper[4985]: I0125 00:10:09.209365 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-d5j6f" Jan 25 00:10:09 crc kubenswrapper[4985]: I0125 00:10:09.209416 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-d5j6f" Jan 25 00:10:09 crc kubenswrapper[4985]: I0125 00:10:09.856615 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-pp7k8" podUID="4daece71-11c1-4ef2-8cae-ff8e392d1abe" containerName="registry-server" containerID="cri-o://1ad1dce0aa1ce2c127a321405ccf6ad0282a9ba0f5659dd3abc4e1c44a527b41" gracePeriod=2 Jan 25 00:10:09 crc kubenswrapper[4985]: I0125 00:10:09.917980 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-zcf2n" Jan 25 00:10:10 crc kubenswrapper[4985]: I0125 00:10:10.273037 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-d5j6f" podUID="5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1" containerName="registry-server" probeResult="failure" output=< Jan 25 00:10:10 crc kubenswrapper[4985]: timeout: failed to connect service ":50051" within 1s Jan 25 00:10:10 crc kubenswrapper[4985]: > Jan 25 00:10:10 crc kubenswrapper[4985]: I0125 00:10:10.914136 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4q65s"] Jan 25 00:10:10 crc kubenswrapper[4985]: I0125 00:10:10.915246 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4q65s" 
podUID="ebb7e09a-b77f-4c72-b892-177ebd17417c" containerName="registry-server" containerID="cri-o://09563aeacf0b86864daef3fb4fda098ba86247eaf80d820b83ce0a3b4b8aceda" gracePeriod=2 Jan 25 00:10:12 crc kubenswrapper[4985]: I0125 00:10:12.837174 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-86dc859c9b-98klj"] Jan 25 00:10:12 crc kubenswrapper[4985]: I0125 00:10:12.837481 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" podUID="61e8a524-c951-4dbd-9f9d-e3698af5e4b5" containerName="controller-manager" containerID="cri-o://0019a234f48255509cda5a41f6e49130627ec28d5950f97e2f7870bc29721dba" gracePeriod=30 Jan 25 00:10:12 crc kubenswrapper[4985]: I0125 00:10:12.880954 4985 generic.go:334] "Generic (PLEG): container finished" podID="4daece71-11c1-4ef2-8cae-ff8e392d1abe" containerID="1ad1dce0aa1ce2c127a321405ccf6ad0282a9ba0f5659dd3abc4e1c44a527b41" exitCode=0 Jan 25 00:10:12 crc kubenswrapper[4985]: I0125 00:10:12.881049 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pp7k8" event={"ID":"4daece71-11c1-4ef2-8cae-ff8e392d1abe","Type":"ContainerDied","Data":"1ad1dce0aa1ce2c127a321405ccf6ad0282a9ba0f5659dd3abc4e1c44a527b41"} Jan 25 00:10:12 crc kubenswrapper[4985]: I0125 00:10:12.883969 4985 generic.go:334] "Generic (PLEG): container finished" podID="5fcaee3d-2838-4823-b0fd-f6285ebfe74c" containerID="d69c039970c23266548e9880303712b2e3048a641c8c3bea1406f992830887d1" exitCode=0 Jan 25 00:10:12 crc kubenswrapper[4985]: I0125 00:10:12.884019 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-czq57" event={"ID":"5fcaee3d-2838-4823-b0fd-f6285ebfe74c","Type":"ContainerDied","Data":"d69c039970c23266548e9880303712b2e3048a641c8c3bea1406f992830887d1"} Jan 25 00:10:12 crc kubenswrapper[4985]: I0125 00:10:12.937728 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr"] Jan 25 00:10:12 crc kubenswrapper[4985]: I0125 00:10:12.937933 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr" podUID="2209b252-4f5b-4f96-bb46-e006fe279367" containerName="route-controller-manager" containerID="cri-o://aca9715646fa00173b750e44d1e5ec1b0f28091aeec46e1f644da3b5b9072661" gracePeriod=30 Jan 25 00:10:13 crc kubenswrapper[4985]: I0125 00:10:13.890665 4985 generic.go:334] "Generic (PLEG): container finished" podID="ebb7e09a-b77f-4c72-b892-177ebd17417c" containerID="09563aeacf0b86864daef3fb4fda098ba86247eaf80d820b83ce0a3b4b8aceda" exitCode=0 Jan 25 00:10:13 crc kubenswrapper[4985]: I0125 00:10:13.890709 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4q65s" event={"ID":"ebb7e09a-b77f-4c72-b892-177ebd17417c","Type":"ContainerDied","Data":"09563aeacf0b86864daef3fb4fda098ba86247eaf80d820b83ce0a3b4b8aceda"} Jan 25 00:10:14 crc kubenswrapper[4985]: I0125 00:10:14.315034 4985 patch_prober.go:28] interesting pod/route-controller-manager-69df57b744-b6ddr container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.58:8443/healthz\": dial tcp 10.217.0.58:8443: connect: connection refused" start-of-body= Jan 25 00:10:14 crc kubenswrapper[4985]: I0125 00:10:14.315413 4985 
prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr" podUID="2209b252-4f5b-4f96-bb46-e006fe279367" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.58:8443/healthz\": dial tcp 10.217.0.58:8443: connect: connection refused" Jan 25 00:10:14 crc kubenswrapper[4985]: I0125 00:10:14.686247 4985 patch_prober.go:28] interesting pod/controller-manager-86dc859c9b-98klj container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.55:8443/healthz\": dial tcp 10.217.0.55:8443: connect: connection refused" start-of-body= Jan 25 00:10:14 crc kubenswrapper[4985]: I0125 00:10:14.686303 4985 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" podUID="61e8a524-c951-4dbd-9f9d-e3698af5e4b5" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.55:8443/healthz\": dial tcp 10.217.0.55:8443: connect: connection refused" Jan 25 00:10:14 crc kubenswrapper[4985]: I0125 00:10:14.898287 4985 generic.go:334] "Generic (PLEG): container finished" podID="61e8a524-c951-4dbd-9f9d-e3698af5e4b5" containerID="0019a234f48255509cda5a41f6e49130627ec28d5950f97e2f7870bc29721dba" exitCode=0 Jan 25 00:10:14 crc kubenswrapper[4985]: I0125 00:10:14.898380 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" event={"ID":"61e8a524-c951-4dbd-9f9d-e3698af5e4b5","Type":"ContainerDied","Data":"0019a234f48255509cda5a41f6e49130627ec28d5950f97e2f7870bc29721dba"} Jan 25 00:10:14 crc kubenswrapper[4985]: I0125 00:10:14.900353 4985 generic.go:334] "Generic (PLEG): container finished" podID="2209b252-4f5b-4f96-bb46-e006fe279367" containerID="aca9715646fa00173b750e44d1e5ec1b0f28091aeec46e1f644da3b5b9072661" exitCode=0 Jan 25 00:10:14 crc kubenswrapper[4985]: I0125 00:10:14.900398 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr" event={"ID":"2209b252-4f5b-4f96-bb46-e006fe279367","Type":"ContainerDied","Data":"aca9715646fa00173b750e44d1e5ec1b0f28091aeec46e1f644da3b5b9072661"} Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.146691 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-czq57" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.154550 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4q65s" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.163498 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-pp7k8" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.241760 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vg4fk\" (UniqueName: \"kubernetes.io/projected/ebb7e09a-b77f-4c72-b892-177ebd17417c-kube-api-access-vg4fk\") pod \"ebb7e09a-b77f-4c72-b892-177ebd17417c\" (UID: \"ebb7e09a-b77f-4c72-b892-177ebd17417c\") " Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.243233 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fcaee3d-2838-4823-b0fd-f6285ebfe74c-catalog-content\") pod \"5fcaee3d-2838-4823-b0fd-f6285ebfe74c\" (UID: \"5fcaee3d-2838-4823-b0fd-f6285ebfe74c\") " Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.243508 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebb7e09a-b77f-4c72-b892-177ebd17417c-catalog-content\") pod \"ebb7e09a-b77f-4c72-b892-177ebd17417c\" (UID: \"ebb7e09a-b77f-4c72-b892-177ebd17417c\") " Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.243574 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fcaee3d-2838-4823-b0fd-f6285ebfe74c-utilities\") pod \"5fcaee3d-2838-4823-b0fd-f6285ebfe74c\" (UID: \"5fcaee3d-2838-4823-b0fd-f6285ebfe74c\") " Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.243592 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g8s22\" (UniqueName: \"kubernetes.io/projected/5fcaee3d-2838-4823-b0fd-f6285ebfe74c-kube-api-access-g8s22\") pod \"5fcaee3d-2838-4823-b0fd-f6285ebfe74c\" (UID: \"5fcaee3d-2838-4823-b0fd-f6285ebfe74c\") " Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.243630 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4daece71-11c1-4ef2-8cae-ff8e392d1abe-utilities\") pod \"4daece71-11c1-4ef2-8cae-ff8e392d1abe\" (UID: \"4daece71-11c1-4ef2-8cae-ff8e392d1abe\") " Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.243991 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebb7e09a-b77f-4c72-b892-177ebd17417c-utilities\") pod \"ebb7e09a-b77f-4c72-b892-177ebd17417c\" (UID: \"ebb7e09a-b77f-4c72-b892-177ebd17417c\") " Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.244043 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fr4hm\" (UniqueName: \"kubernetes.io/projected/4daece71-11c1-4ef2-8cae-ff8e392d1abe-kube-api-access-fr4hm\") pod \"4daece71-11c1-4ef2-8cae-ff8e392d1abe\" (UID: \"4daece71-11c1-4ef2-8cae-ff8e392d1abe\") " Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.244094 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4daece71-11c1-4ef2-8cae-ff8e392d1abe-catalog-content\") pod \"4daece71-11c1-4ef2-8cae-ff8e392d1abe\" (UID: \"4daece71-11c1-4ef2-8cae-ff8e392d1abe\") " Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.244264 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5fcaee3d-2838-4823-b0fd-f6285ebfe74c-utilities" (OuterVolumeSpecName: "utilities") pod 
"5fcaee3d-2838-4823-b0fd-f6285ebfe74c" (UID: "5fcaee3d-2838-4823-b0fd-f6285ebfe74c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.244498 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4daece71-11c1-4ef2-8cae-ff8e392d1abe-utilities" (OuterVolumeSpecName: "utilities") pod "4daece71-11c1-4ef2-8cae-ff8e392d1abe" (UID: "4daece71-11c1-4ef2-8cae-ff8e392d1abe"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.244530 4985 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fcaee3d-2838-4823-b0fd-f6285ebfe74c-utilities\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.245108 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ebb7e09a-b77f-4c72-b892-177ebd17417c-utilities" (OuterVolumeSpecName: "utilities") pod "ebb7e09a-b77f-4c72-b892-177ebd17417c" (UID: "ebb7e09a-b77f-4c72-b892-177ebd17417c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.250277 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebb7e09a-b77f-4c72-b892-177ebd17417c-kube-api-access-vg4fk" (OuterVolumeSpecName: "kube-api-access-vg4fk") pod "ebb7e09a-b77f-4c72-b892-177ebd17417c" (UID: "ebb7e09a-b77f-4c72-b892-177ebd17417c"). InnerVolumeSpecName "kube-api-access-vg4fk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.252644 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fcaee3d-2838-4823-b0fd-f6285ebfe74c-kube-api-access-g8s22" (OuterVolumeSpecName: "kube-api-access-g8s22") pod "5fcaee3d-2838-4823-b0fd-f6285ebfe74c" (UID: "5fcaee3d-2838-4823-b0fd-f6285ebfe74c"). InnerVolumeSpecName "kube-api-access-g8s22". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.254372 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4daece71-11c1-4ef2-8cae-ff8e392d1abe-kube-api-access-fr4hm" (OuterVolumeSpecName: "kube-api-access-fr4hm") pod "4daece71-11c1-4ef2-8cae-ff8e392d1abe" (UID: "4daece71-11c1-4ef2-8cae-ff8e392d1abe"). InnerVolumeSpecName "kube-api-access-fr4hm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.273266 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ebb7e09a-b77f-4c72-b892-177ebd17417c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ebb7e09a-b77f-4c72-b892-177ebd17417c" (UID: "ebb7e09a-b77f-4c72-b892-177ebd17417c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.338404 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4daece71-11c1-4ef2-8cae-ff8e392d1abe-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4daece71-11c1-4ef2-8cae-ff8e392d1abe" (UID: "4daece71-11c1-4ef2-8cae-ff8e392d1abe"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.345882 4985 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4daece71-11c1-4ef2-8cae-ff8e392d1abe-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.345901 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vg4fk\" (UniqueName: \"kubernetes.io/projected/ebb7e09a-b77f-4c72-b892-177ebd17417c-kube-api-access-vg4fk\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.345911 4985 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ebb7e09a-b77f-4c72-b892-177ebd17417c-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.345920 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g8s22\" (UniqueName: \"kubernetes.io/projected/5fcaee3d-2838-4823-b0fd-f6285ebfe74c-kube-api-access-g8s22\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.345929 4985 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4daece71-11c1-4ef2-8cae-ff8e392d1abe-utilities\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.345937 4985 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ebb7e09a-b77f-4c72-b892-177ebd17417c-utilities\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.345946 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fr4hm\" (UniqueName: \"kubernetes.io/projected/4daece71-11c1-4ef2-8cae-ff8e392d1abe-kube-api-access-fr4hm\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.542379 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.548703 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-serving-cert\") pod \"61e8a524-c951-4dbd-9f9d-e3698af5e4b5\" (UID: \"61e8a524-c951-4dbd-9f9d-e3698af5e4b5\") " Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.548759 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-config\") pod \"61e8a524-c951-4dbd-9f9d-e3698af5e4b5\" (UID: \"61e8a524-c951-4dbd-9f9d-e3698af5e4b5\") " Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.548844 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-client-ca\") pod \"61e8a524-c951-4dbd-9f9d-e3698af5e4b5\" (UID: \"61e8a524-c951-4dbd-9f9d-e3698af5e4b5\") " Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.548882 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-proxy-ca-bundles\") pod \"61e8a524-c951-4dbd-9f9d-e3698af5e4b5\" (UID: \"61e8a524-c951-4dbd-9f9d-e3698af5e4b5\") " Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.548901 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wvks6\" (UniqueName: \"kubernetes.io/projected/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-kube-api-access-wvks6\") pod \"61e8a524-c951-4dbd-9f9d-e3698af5e4b5\" (UID: \"61e8a524-c951-4dbd-9f9d-e3698af5e4b5\") " Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.550829 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-client-ca" (OuterVolumeSpecName: "client-ca") pod "61e8a524-c951-4dbd-9f9d-e3698af5e4b5" (UID: "61e8a524-c951-4dbd-9f9d-e3698af5e4b5"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.550901 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "61e8a524-c951-4dbd-9f9d-e3698af5e4b5" (UID: "61e8a524-c951-4dbd-9f9d-e3698af5e4b5"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.550987 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-config" (OuterVolumeSpecName: "config") pod "61e8a524-c951-4dbd-9f9d-e3698af5e4b5" (UID: "61e8a524-c951-4dbd-9f9d-e3698af5e4b5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.553213 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "61e8a524-c951-4dbd-9f9d-e3698af5e4b5" (UID: "61e8a524-c951-4dbd-9f9d-e3698af5e4b5"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.558302 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-kube-api-access-wvks6" (OuterVolumeSpecName: "kube-api-access-wvks6") pod "61e8a524-c951-4dbd-9f9d-e3698af5e4b5" (UID: "61e8a524-c951-4dbd-9f9d-e3698af5e4b5"). InnerVolumeSpecName "kube-api-access-wvks6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.590148 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.632302 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5fcaee3d-2838-4823-b0fd-f6285ebfe74c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5fcaee3d-2838-4823-b0fd-f6285ebfe74c" (UID: "5fcaee3d-2838-4823-b0fd-f6285ebfe74c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.650897 4985 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fcaee3d-2838-4823-b0fd-f6285ebfe74c-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.651159 4985 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-client-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.651170 4985 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.651180 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wvks6\" (UniqueName: \"kubernetes.io/projected/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-kube-api-access-wvks6\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.651190 4985 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.651198 4985 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/61e8a524-c951-4dbd-9f9d-e3698af5e4b5-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.752281 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2209b252-4f5b-4f96-bb46-e006fe279367-client-ca\") pod \"2209b252-4f5b-4f96-bb46-e006fe279367\" (UID: \"2209b252-4f5b-4f96-bb46-e006fe279367\") " Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.752397 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2209b252-4f5b-4f96-bb46-e006fe279367-serving-cert\") pod \"2209b252-4f5b-4f96-bb46-e006fe279367\" (UID: \"2209b252-4f5b-4f96-bb46-e006fe279367\") " Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.752447 4985 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4wxt\" (UniqueName: \"kubernetes.io/projected/2209b252-4f5b-4f96-bb46-e006fe279367-kube-api-access-c4wxt\") pod \"2209b252-4f5b-4f96-bb46-e006fe279367\" (UID: \"2209b252-4f5b-4f96-bb46-e006fe279367\") " Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.752489 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2209b252-4f5b-4f96-bb46-e006fe279367-config\") pod \"2209b252-4f5b-4f96-bb46-e006fe279367\" (UID: \"2209b252-4f5b-4f96-bb46-e006fe279367\") " Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.753175 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2209b252-4f5b-4f96-bb46-e006fe279367-client-ca" (OuterVolumeSpecName: "client-ca") pod "2209b252-4f5b-4f96-bb46-e006fe279367" (UID: "2209b252-4f5b-4f96-bb46-e006fe279367"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.753212 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2209b252-4f5b-4f96-bb46-e006fe279367-config" (OuterVolumeSpecName: "config") pod "2209b252-4f5b-4f96-bb46-e006fe279367" (UID: "2209b252-4f5b-4f96-bb46-e006fe279367"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.755550 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2209b252-4f5b-4f96-bb46-e006fe279367-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "2209b252-4f5b-4f96-bb46-e006fe279367" (UID: "2209b252-4f5b-4f96-bb46-e006fe279367"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.760408 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2209b252-4f5b-4f96-bb46-e006fe279367-kube-api-access-c4wxt" (OuterVolumeSpecName: "kube-api-access-c4wxt") pod "2209b252-4f5b-4f96-bb46-e006fe279367" (UID: "2209b252-4f5b-4f96-bb46-e006fe279367"). InnerVolumeSpecName "kube-api-access-c4wxt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.853686 4985 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2209b252-4f5b-4f96-bb46-e006fe279367-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.853723 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4wxt\" (UniqueName: \"kubernetes.io/projected/2209b252-4f5b-4f96-bb46-e006fe279367-kube-api-access-c4wxt\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.853735 4985 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2209b252-4f5b-4f96-bb46-e006fe279367-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.853744 4985 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2209b252-4f5b-4f96-bb46-e006fe279367-client-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.905872 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" event={"ID":"61e8a524-c951-4dbd-9f9d-e3698af5e4b5","Type":"ContainerDied","Data":"a011e2baa8f7cdadda5f6cfe3a7cce03ff37b9bbcaafb94c237a0956c0c8c726"} Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.905895 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-86dc859c9b-98klj" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.905923 4985 scope.go:117] "RemoveContainer" containerID="0019a234f48255509cda5a41f6e49130627ec28d5950f97e2f7870bc29721dba" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.908828 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr" event={"ID":"2209b252-4f5b-4f96-bb46-e006fe279367","Type":"ContainerDied","Data":"5743fc852188b8b93be36f39fa9f75923970c70860110e424801cdcfed287dcd"} Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.908889 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.910509 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-pp7k8" event={"ID":"4daece71-11c1-4ef2-8cae-ff8e392d1abe","Type":"ContainerDied","Data":"fba643509d226555b244e2ed690fc914b78473fa674d996ffa37ee6af2b1974d"} Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.910525 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-pp7k8" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.912065 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-czq57" event={"ID":"5fcaee3d-2838-4823-b0fd-f6285ebfe74c","Type":"ContainerDied","Data":"83eb0e9ae8dde25bb90f3a6872d2544296956ae2bcf676cd9e1bac6a6dc5f340"} Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.912074 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-czq57" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.913874 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4q65s" event={"ID":"ebb7e09a-b77f-4c72-b892-177ebd17417c","Type":"ContainerDied","Data":"92a0e0d5e970e476c4df347560232771c4036c126f762712ae2caaa9d24a8a7d"} Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.913924 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4q65s" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.940830 4985 scope.go:117] "RemoveContainer" containerID="aca9715646fa00173b750e44d1e5ec1b0f28091aeec46e1f644da3b5b9072661" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.951731 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-86dc859c9b-98klj"] Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.958700 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-86dc859c9b-98klj"] Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.980291 4985 scope.go:117] "RemoveContainer" containerID="1ad1dce0aa1ce2c127a321405ccf6ad0282a9ba0f5659dd3abc4e1c44a527b41" Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.982589 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr"] Jan 25 00:10:15 crc kubenswrapper[4985]: I0125 00:10:15.988776 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-69df57b744-b6ddr"] Jan 25 00:10:16 crc kubenswrapper[4985]: I0125 00:10:16.001116 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-pp7k8"] Jan 25 00:10:16 crc kubenswrapper[4985]: I0125 00:10:16.004829 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-pp7k8"] Jan 25 00:10:16 crc kubenswrapper[4985]: I0125 00:10:16.007426 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-czq57"] Jan 25 00:10:16 crc kubenswrapper[4985]: I0125 00:10:16.010004 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-czq57"] Jan 25 00:10:16 crc kubenswrapper[4985]: I0125 00:10:16.013584 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4q65s"] Jan 25 00:10:16 crc kubenswrapper[4985]: I0125 00:10:16.015652 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4q65s"] Jan 25 00:10:16 crc kubenswrapper[4985]: I0125 00:10:16.281291 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2209b252-4f5b-4f96-bb46-e006fe279367" path="/var/lib/kubelet/pods/2209b252-4f5b-4f96-bb46-e006fe279367/volumes" Jan 25 00:10:16 crc kubenswrapper[4985]: I0125 00:10:16.282122 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4daece71-11c1-4ef2-8cae-ff8e392d1abe" path="/var/lib/kubelet/pods/4daece71-11c1-4ef2-8cae-ff8e392d1abe/volumes" Jan 25 00:10:16 crc kubenswrapper[4985]: I0125 00:10:16.282853 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fcaee3d-2838-4823-b0fd-f6285ebfe74c" path="/var/lib/kubelet/pods/5fcaee3d-2838-4823-b0fd-f6285ebfe74c/volumes" Jan 25 00:10:16 crc 
kubenswrapper[4985]: I0125 00:10:16.284097 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61e8a524-c951-4dbd-9f9d-e3698af5e4b5" path="/var/lib/kubelet/pods/61e8a524-c951-4dbd-9f9d-e3698af5e4b5/volumes" Jan 25 00:10:16 crc kubenswrapper[4985]: I0125 00:10:16.284655 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebb7e09a-b77f-4c72-b892-177ebd17417c" path="/var/lib/kubelet/pods/ebb7e09a-b77f-4c72-b892-177ebd17417c/volumes" Jan 25 00:10:16 crc kubenswrapper[4985]: I0125 00:10:16.476471 4985 scope.go:117] "RemoveContainer" containerID="61e48fefeef57ef9c38609adc0f05786b396c72379746a9fafcad011d9558024" Jan 25 00:10:16 crc kubenswrapper[4985]: I0125 00:10:16.497440 4985 scope.go:117] "RemoveContainer" containerID="ea0dca48057794d9f6e4010d2ef243741875e6e15fa2c6706d5a9cf1abe65cc1" Jan 25 00:10:16 crc kubenswrapper[4985]: I0125 00:10:16.519389 4985 scope.go:117] "RemoveContainer" containerID="d69c039970c23266548e9880303712b2e3048a641c8c3bea1406f992830887d1" Jan 25 00:10:16 crc kubenswrapper[4985]: I0125 00:10:16.741653 4985 scope.go:117] "RemoveContainer" containerID="951f0125e25cce799343a1415fa867db97019ba36ca400387ab02c048454a87f" Jan 25 00:10:16 crc kubenswrapper[4985]: I0125 00:10:16.758936 4985 scope.go:117] "RemoveContainer" containerID="07df81b8a29b83053ae66d807233ba3a4a8e4d972515693b38feba46e70e57a2" Jan 25 00:10:16 crc kubenswrapper[4985]: I0125 00:10:16.773581 4985 scope.go:117] "RemoveContainer" containerID="09563aeacf0b86864daef3fb4fda098ba86247eaf80d820b83ce0a3b4b8aceda" Jan 25 00:10:16 crc kubenswrapper[4985]: I0125 00:10:16.794153 4985 scope.go:117] "RemoveContainer" containerID="f657e03c8e04d849c1f9e17c73065dde579d3fc52d03ceb863559c4258cd847f" Jan 25 00:10:16 crc kubenswrapper[4985]: I0125 00:10:16.806323 4985 scope.go:117] "RemoveContainer" containerID="aa99e429218be279a7fb3f9296fd1c4bdce6196b344e5f85731a8241d93f4b44" Jan 25 00:10:16 crc kubenswrapper[4985]: I0125 00:10:16.930912 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" event={"ID":"5fa83abe-5c61-40a5-bf77-d8f929bdda78","Type":"ContainerStarted","Data":"da40e82ed6c4f4bb8df94fc89421ac591c8928ddf8db2485dc08f8c949f5f50f"} Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.399781 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-29vvw"] Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.516093 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5768cb968-97whc"] Jan 25 00:10:17 crc kubenswrapper[4985]: E0125 00:10:17.516369 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fcaee3d-2838-4823-b0fd-f6285ebfe74c" containerName="registry-server" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.516385 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fcaee3d-2838-4823-b0fd-f6285ebfe74c" containerName="registry-server" Jan 25 00:10:17 crc kubenswrapper[4985]: E0125 00:10:17.516395 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebb7e09a-b77f-4c72-b892-177ebd17417c" containerName="registry-server" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.516402 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebb7e09a-b77f-4c72-b892-177ebd17417c" containerName="registry-server" Jan 25 00:10:17 crc kubenswrapper[4985]: E0125 00:10:17.516413 4985 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="ebb7e09a-b77f-4c72-b892-177ebd17417c" containerName="extract-utilities" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.516419 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebb7e09a-b77f-4c72-b892-177ebd17417c" containerName="extract-utilities" Jan 25 00:10:17 crc kubenswrapper[4985]: E0125 00:10:17.516428 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2209b252-4f5b-4f96-bb46-e006fe279367" containerName="route-controller-manager" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.516433 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="2209b252-4f5b-4f96-bb46-e006fe279367" containerName="route-controller-manager" Jan 25 00:10:17 crc kubenswrapper[4985]: E0125 00:10:17.516441 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4daece71-11c1-4ef2-8cae-ff8e392d1abe" containerName="extract-utilities" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.516447 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="4daece71-11c1-4ef2-8cae-ff8e392d1abe" containerName="extract-utilities" Jan 25 00:10:17 crc kubenswrapper[4985]: E0125 00:10:17.516455 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebb7e09a-b77f-4c72-b892-177ebd17417c" containerName="extract-content" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.516460 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebb7e09a-b77f-4c72-b892-177ebd17417c" containerName="extract-content" Jan 25 00:10:17 crc kubenswrapper[4985]: E0125 00:10:17.516475 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4daece71-11c1-4ef2-8cae-ff8e392d1abe" containerName="registry-server" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.516480 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="4daece71-11c1-4ef2-8cae-ff8e392d1abe" containerName="registry-server" Jan 25 00:10:17 crc kubenswrapper[4985]: E0125 00:10:17.516489 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fcaee3d-2838-4823-b0fd-f6285ebfe74c" containerName="extract-utilities" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.516495 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fcaee3d-2838-4823-b0fd-f6285ebfe74c" containerName="extract-utilities" Jan 25 00:10:17 crc kubenswrapper[4985]: E0125 00:10:17.516503 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fcaee3d-2838-4823-b0fd-f6285ebfe74c" containerName="extract-content" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.516509 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fcaee3d-2838-4823-b0fd-f6285ebfe74c" containerName="extract-content" Jan 25 00:10:17 crc kubenswrapper[4985]: E0125 00:10:17.516517 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61e8a524-c951-4dbd-9f9d-e3698af5e4b5" containerName="controller-manager" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.516522 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="61e8a524-c951-4dbd-9f9d-e3698af5e4b5" containerName="controller-manager" Jan 25 00:10:17 crc kubenswrapper[4985]: E0125 00:10:17.516530 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4282f471-0e21-407f-af03-77319f7ceb9e" containerName="pruner" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.516537 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="4282f471-0e21-407f-af03-77319f7ceb9e" containerName="pruner" Jan 25 00:10:17 crc kubenswrapper[4985]: E0125 00:10:17.516545 4985 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="4daece71-11c1-4ef2-8cae-ff8e392d1abe" containerName="extract-content" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.516551 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="4daece71-11c1-4ef2-8cae-ff8e392d1abe" containerName="extract-content" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.516638 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebb7e09a-b77f-4c72-b892-177ebd17417c" containerName="registry-server" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.516648 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="2209b252-4f5b-4f96-bb46-e006fe279367" containerName="route-controller-manager" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.516655 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fcaee3d-2838-4823-b0fd-f6285ebfe74c" containerName="registry-server" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.516665 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="4282f471-0e21-407f-af03-77319f7ceb9e" containerName="pruner" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.516672 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="61e8a524-c951-4dbd-9f9d-e3698af5e4b5" containerName="controller-manager" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.516680 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="4daece71-11c1-4ef2-8cae-ff8e392d1abe" containerName="registry-server" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.517055 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.519034 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.519288 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs"] Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.519453 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.519855 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.520037 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.520185 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.520602 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.521351 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.523603 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.523994 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.524898 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.525097 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.525430 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.525592 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.529576 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.552263 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs"] Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.594040 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5768cb968-97whc"] Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.677787 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6a43b154-65f1-46f6-8417-479bd1464b41-proxy-ca-bundles\") pod \"controller-manager-75dd5bcc84-vxtrs\" (UID: \"6a43b154-65f1-46f6-8417-479bd1464b41\") " pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.678378 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wt8h\" (UniqueName: \"kubernetes.io/projected/42646a8a-9535-4af7-9fc7-73f495747fb3-kube-api-access-2wt8h\") pod \"route-controller-manager-5768cb968-97whc\" (UID: \"42646a8a-9535-4af7-9fc7-73f495747fb3\") " pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.678521 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a43b154-65f1-46f6-8417-479bd1464b41-serving-cert\") pod \"controller-manager-75dd5bcc84-vxtrs\" 
(UID: \"6a43b154-65f1-46f6-8417-479bd1464b41\") " pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.678650 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/42646a8a-9535-4af7-9fc7-73f495747fb3-client-ca\") pod \"route-controller-manager-5768cb968-97whc\" (UID: \"42646a8a-9535-4af7-9fc7-73f495747fb3\") " pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.678751 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a43b154-65f1-46f6-8417-479bd1464b41-config\") pod \"controller-manager-75dd5bcc84-vxtrs\" (UID: \"6a43b154-65f1-46f6-8417-479bd1464b41\") " pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.678929 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/42646a8a-9535-4af7-9fc7-73f495747fb3-serving-cert\") pod \"route-controller-manager-5768cb968-97whc\" (UID: \"42646a8a-9535-4af7-9fc7-73f495747fb3\") " pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.679026 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6a43b154-65f1-46f6-8417-479bd1464b41-client-ca\") pod \"controller-manager-75dd5bcc84-vxtrs\" (UID: \"6a43b154-65f1-46f6-8417-479bd1464b41\") " pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.679094 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42646a8a-9535-4af7-9fc7-73f495747fb3-config\") pod \"route-controller-manager-5768cb968-97whc\" (UID: \"42646a8a-9535-4af7-9fc7-73f495747fb3\") " pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.679299 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fm9kl\" (UniqueName: \"kubernetes.io/projected/6a43b154-65f1-46f6-8417-479bd1464b41-kube-api-access-fm9kl\") pod \"controller-manager-75dd5bcc84-vxtrs\" (UID: \"6a43b154-65f1-46f6-8417-479bd1464b41\") " pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.780533 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a43b154-65f1-46f6-8417-479bd1464b41-serving-cert\") pod \"controller-manager-75dd5bcc84-vxtrs\" (UID: \"6a43b154-65f1-46f6-8417-479bd1464b41\") " pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.780600 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/42646a8a-9535-4af7-9fc7-73f495747fb3-client-ca\") pod \"route-controller-manager-5768cb968-97whc\" (UID: \"42646a8a-9535-4af7-9fc7-73f495747fb3\") " 
pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.780620 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a43b154-65f1-46f6-8417-479bd1464b41-config\") pod \"controller-manager-75dd5bcc84-vxtrs\" (UID: \"6a43b154-65f1-46f6-8417-479bd1464b41\") " pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.780641 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6a43b154-65f1-46f6-8417-479bd1464b41-client-ca\") pod \"controller-manager-75dd5bcc84-vxtrs\" (UID: \"6a43b154-65f1-46f6-8417-479bd1464b41\") " pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.780656 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/42646a8a-9535-4af7-9fc7-73f495747fb3-serving-cert\") pod \"route-controller-manager-5768cb968-97whc\" (UID: \"42646a8a-9535-4af7-9fc7-73f495747fb3\") " pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.780682 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42646a8a-9535-4af7-9fc7-73f495747fb3-config\") pod \"route-controller-manager-5768cb968-97whc\" (UID: \"42646a8a-9535-4af7-9fc7-73f495747fb3\") " pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.780714 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fm9kl\" (UniqueName: \"kubernetes.io/projected/6a43b154-65f1-46f6-8417-479bd1464b41-kube-api-access-fm9kl\") pod \"controller-manager-75dd5bcc84-vxtrs\" (UID: \"6a43b154-65f1-46f6-8417-479bd1464b41\") " pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.780732 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6a43b154-65f1-46f6-8417-479bd1464b41-proxy-ca-bundles\") pod \"controller-manager-75dd5bcc84-vxtrs\" (UID: \"6a43b154-65f1-46f6-8417-479bd1464b41\") " pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.780753 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wt8h\" (UniqueName: \"kubernetes.io/projected/42646a8a-9535-4af7-9fc7-73f495747fb3-kube-api-access-2wt8h\") pod \"route-controller-manager-5768cb968-97whc\" (UID: \"42646a8a-9535-4af7-9fc7-73f495747fb3\") " pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.781985 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/42646a8a-9535-4af7-9fc7-73f495747fb3-client-ca\") pod \"route-controller-manager-5768cb968-97whc\" (UID: \"42646a8a-9535-4af7-9fc7-73f495747fb3\") " pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.782379 4985 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42646a8a-9535-4af7-9fc7-73f495747fb3-config\") pod \"route-controller-manager-5768cb968-97whc\" (UID: \"42646a8a-9535-4af7-9fc7-73f495747fb3\") " pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.782413 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a43b154-65f1-46f6-8417-479bd1464b41-config\") pod \"controller-manager-75dd5bcc84-vxtrs\" (UID: \"6a43b154-65f1-46f6-8417-479bd1464b41\") " pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.782730 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6a43b154-65f1-46f6-8417-479bd1464b41-proxy-ca-bundles\") pod \"controller-manager-75dd5bcc84-vxtrs\" (UID: \"6a43b154-65f1-46f6-8417-479bd1464b41\") " pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.782872 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6a43b154-65f1-46f6-8417-479bd1464b41-client-ca\") pod \"controller-manager-75dd5bcc84-vxtrs\" (UID: \"6a43b154-65f1-46f6-8417-479bd1464b41\") " pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.787746 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a43b154-65f1-46f6-8417-479bd1464b41-serving-cert\") pod \"controller-manager-75dd5bcc84-vxtrs\" (UID: \"6a43b154-65f1-46f6-8417-479bd1464b41\") " pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.787966 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/42646a8a-9535-4af7-9fc7-73f495747fb3-serving-cert\") pod \"route-controller-manager-5768cb968-97whc\" (UID: \"42646a8a-9535-4af7-9fc7-73f495747fb3\") " pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.800384 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fm9kl\" (UniqueName: \"kubernetes.io/projected/6a43b154-65f1-46f6-8417-479bd1464b41-kube-api-access-fm9kl\") pod \"controller-manager-75dd5bcc84-vxtrs\" (UID: \"6a43b154-65f1-46f6-8417-479bd1464b41\") " pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.804800 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wt8h\" (UniqueName: \"kubernetes.io/projected/42646a8a-9535-4af7-9fc7-73f495747fb3-kube-api-access-2wt8h\") pod \"route-controller-manager-5768cb968-97whc\" (UID: \"42646a8a-9535-4af7-9fc7-73f495747fb3\") " pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.859992 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.866602 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.975892 4985 generic.go:334] "Generic (PLEG): container finished" podID="7a26fe5e-9560-455a-a98e-6185e89ee607" containerID="0f605c940ad13a7c904d75e6152720e18c0cc73eb5998b1a8b4ab45542d6afc8" exitCode=0 Jan 25 00:10:17 crc kubenswrapper[4985]: I0125 00:10:17.977197 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rl7bj" event={"ID":"7a26fe5e-9560-455a-a98e-6185e89ee607","Type":"ContainerDied","Data":"0f605c940ad13a7c904d75e6152720e18c0cc73eb5998b1a8b4ab45542d6afc8"} Jan 25 00:10:18 crc kubenswrapper[4985]: I0125 00:10:18.214899 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs"] Jan 25 00:10:18 crc kubenswrapper[4985]: W0125 00:10:18.217769 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6a43b154_65f1_46f6_8417_479bd1464b41.slice/crio-34c9e24c4d276f3487452026e7ab58dc51ce1837146c70f6ee6943ced06fac59 WatchSource:0}: Error finding container 34c9e24c4d276f3487452026e7ab58dc51ce1837146c70f6ee6943ced06fac59: Status 404 returned error can't find the container with id 34c9e24c4d276f3487452026e7ab58dc51ce1837146c70f6ee6943ced06fac59 Jan 25 00:10:18 crc kubenswrapper[4985]: I0125 00:10:18.346815 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5768cb968-97whc"] Jan 25 00:10:18 crc kubenswrapper[4985]: I0125 00:10:18.982686 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" event={"ID":"6a43b154-65f1-46f6-8417-479bd1464b41","Type":"ContainerStarted","Data":"34c9e24c4d276f3487452026e7ab58dc51ce1837146c70f6ee6943ced06fac59"} Jan 25 00:10:19 crc kubenswrapper[4985]: I0125 00:10:19.254686 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-d5j6f" Jan 25 00:10:19 crc kubenswrapper[4985]: I0125 00:10:19.296976 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-d5j6f" Jan 25 00:10:20 crc kubenswrapper[4985]: I0125 00:10:20.965236 4985 patch_prober.go:28] interesting pod/authentication-operator-69f744f599-t7bhx container/authentication-operator namespace/openshift-authentication-operator: Liveness probe status=failure output="Get \"https://10.217.0.34:8443/healthz\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 25 00:10:20 crc kubenswrapper[4985]: I0125 00:10:20.965481 4985 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-authentication-operator/authentication-operator-69f744f599-t7bhx" podUID="ec54eb4a-a089-4c2a-9049-00a412be5916" containerName="authentication-operator" probeResult="failure" output="Get \"https://10.217.0.34:8443/healthz\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Jan 25 00:10:21 crc kubenswrapper[4985]: W0125 00:10:21.516307 4985 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod42646a8a_9535_4af7_9fc7_73f495747fb3.slice/crio-a23469bb95481633564eb5c126f9cdf77b640ae9dc39120e4d308a0873c6d7a1 WatchSource:0}: Error finding container a23469bb95481633564eb5c126f9cdf77b640ae9dc39120e4d308a0873c6d7a1: Status 404 returned error can't find the container with id a23469bb95481633564eb5c126f9cdf77b640ae9dc39120e4d308a0873c6d7a1 Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.003990 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" event={"ID":"42646a8a-9535-4af7-9fc7-73f495747fb3","Type":"ContainerStarted","Data":"fc7b55087a83daae5a0b67ace5ea8c7b18c43d373288f43d5e6c5c0fbc196d8d"} Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.004392 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" event={"ID":"42646a8a-9535-4af7-9fc7-73f495747fb3","Type":"ContainerStarted","Data":"a23469bb95481633564eb5c126f9cdf77b640ae9dc39120e4d308a0873c6d7a1"} Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.004684 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.005959 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.009334 4985 patch_prober.go:28] interesting pod/controller-manager-75dd5bcc84-vxtrs container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.61:8443/healthz\": dial tcp 10.217.0.61:8443: connect: connection refused" start-of-body= Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.009481 4985 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" podUID="6a43b154-65f1-46f6-8417-479bd1464b41" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.61:8443/healthz\": dial tcp 10.217.0.61:8443: connect: connection refused" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.010558 4985 patch_prober.go:28] interesting pod/route-controller-manager-5768cb968-97whc container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.60:8443/healthz\": dial tcp 10.217.0.60:8443: connect: connection refused" start-of-body= Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.010664 4985 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" podUID="42646a8a-9535-4af7-9fc7-73f495747fb3" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.60:8443/healthz\": dial tcp 10.217.0.60:8443: connect: connection refused" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.034891 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" podStartSLOduration=10.03487401 podStartE2EDuration="10.03487401s" podCreationTimestamp="2026-01-25 00:10:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2026-01-25 00:10:22.032798057 +0000 UTC m=+232.064734340" watchObservedRunningTime="2026-01-25 00:10:22.03487401 +0000 UTC m=+232.066810283" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.054305 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" podStartSLOduration=10.054286525 podStartE2EDuration="10.054286525s" podCreationTimestamp="2026-01-25 00:10:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:10:22.053569107 +0000 UTC m=+232.085505390" watchObservedRunningTime="2026-01-25 00:10:22.054286525 +0000 UTC m=+232.086222798" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.310499 4985 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.311585 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.314402 4985 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.314683 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b" gracePeriod=15 Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.314707 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178" gracePeriod=15 Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.314762 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f" gracePeriod=15 Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.314721 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58" gracePeriod=15 Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.314758 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651" gracePeriod=15 Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.318624 4985 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 25 00:10:22 crc kubenswrapper[4985]: E0125 00:10:22.319032 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-cert-regeneration-controller" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.319125 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 25 00:10:22 crc kubenswrapper[4985]: E0125 00:10:22.319209 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.319291 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 25 00:10:22 crc kubenswrapper[4985]: E0125 00:10:22.319363 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.319524 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 25 00:10:22 crc kubenswrapper[4985]: E0125 00:10:22.319598 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.319659 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 25 00:10:22 crc kubenswrapper[4985]: E0125 00:10:22.319762 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.319819 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 25 00:10:22 crc kubenswrapper[4985]: E0125 00:10:22.319877 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.319928 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 25 00:10:22 crc kubenswrapper[4985]: E0125 00:10:22.319981 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.320030 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.320208 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.320274 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.320329 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.320381 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 
25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.320447 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.320506 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.342023 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.443323 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.443407 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.443429 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.443472 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.443499 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.443521 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.443545 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.443569 4985 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.544204 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.544279 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.544300 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.544297 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.544336 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.544371 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.544370 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.544426 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.544427 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.544461 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.544452 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.544494 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.544518 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.544530 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.544629 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.544745 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.621426 4985 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.621717 4985 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" 
probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.637287 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 25 00:10:22 crc kubenswrapper[4985]: W0125 00:10:22.656433 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-dd9cf7dd54299129d2a4994584c0c16b8001d56f2c73b67b35c35aef7334e061 WatchSource:0}: Error finding container dd9cf7dd54299129d2a4994584c0c16b8001d56f2c73b67b35c35aef7334e061: Status 404 returned error can't find the container with id dd9cf7dd54299129d2a4994584c0c16b8001d56f2c73b67b35c35aef7334e061 Jan 25 00:10:22 crc kubenswrapper[4985]: E0125 00:10:22.853351 4985 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.196:6443: connect: connection refused" event="&Event{ObjectMeta:{redhat-marketplace-rl7bj.188dd0d0924986e4 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:redhat-marketplace-rl7bj,UID:7a26fe5e-9560-455a-a98e-6185e89ee607,APIVersion:v1,ResourceVersion:28452,FieldPath:spec.containers{registry-server},},Reason:Created,Message:Created container registry-server,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-25 00:10:22.852540132 +0000 UTC m=+232.884476405,LastTimestamp:2026-01-25 00:10:22.852540132 +0000 UTC m=+232.884476405,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 25 00:10:22 crc kubenswrapper[4985]: E0125 00:10:22.930148 4985 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:22 crc kubenswrapper[4985]: E0125 00:10:22.930578 4985 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:22 crc kubenswrapper[4985]: E0125 00:10:22.931067 4985 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:22 crc kubenswrapper[4985]: E0125 00:10:22.931575 4985 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:22 crc kubenswrapper[4985]: E0125 00:10:22.932508 4985 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:22 crc kubenswrapper[4985]: I0125 00:10:22.932549 4985 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" 
err="failed 5 attempts to update lease" Jan 25 00:10:22 crc kubenswrapper[4985]: E0125 00:10:22.932861 4985 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.196:6443: connect: connection refused" interval="200ms" Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.017572 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rl7bj" event={"ID":"7a26fe5e-9560-455a-a98e-6185e89ee607","Type":"ContainerStarted","Data":"e8924f7b027489aa74840163eb9ab05bbca149ec8dd4a534bf51a112c0f96ab5"} Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.018199 4985 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.018378 4985 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.018639 4985 status_manager.go:851] "Failed to get status for pod" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" pod="openshift-marketplace/redhat-marketplace-rl7bj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-rl7bj\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.019127 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"dd9cf7dd54299129d2a4994584c0c16b8001d56f2c73b67b35c35aef7334e061"} Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.020444 4985 generic.go:334] "Generic (PLEG): container finished" podID="83b165bc-18cd-43cb-9c88-18b18c31229d" containerID="ae710686592490a7bd47cc94649616ce5041440e049e28416fdf9ded696d9533" exitCode=0 Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.020517 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"83b165bc-18cd-43cb-9c88-18b18c31229d","Type":"ContainerDied","Data":"ae710686592490a7bd47cc94649616ce5041440e049e28416fdf9ded696d9533"} Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.020818 4985 status_manager.go:851] "Failed to get status for pod" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" pod="openshift-marketplace/redhat-marketplace-rl7bj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-rl7bj\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.021066 4985 status_manager.go:851] "Failed to get status for pod" podUID="83b165bc-18cd-43cb-9c88-18b18c31229d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.196:6443: 
connect: connection refused" Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.021444 4985 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.021708 4985 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.022631 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.023787 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.024398 4985 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178" exitCode=0 Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.024414 4985 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f" exitCode=0 Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.024421 4985 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651" exitCode=0 Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.024428 4985 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58" exitCode=2 Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.024504 4985 scope.go:117] "RemoveContainer" containerID="fa8cf11f39d8dfcedfdb24463094e7e54d31102f7ff31e1697daa2fed1e77aef" Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.026871 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" event={"ID":"6a43b154-65f1-46f6-8417-479bd1464b41","Type":"ContainerStarted","Data":"da6dbb7d6882081b42fabdb5ba3b8e49b51e8c967a4601dfa7f3c852de984ca7"} Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.031246 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.031640 4985 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.032023 4985 status_manager.go:851] "Failed to get 
status for pod" podUID="83b165bc-18cd-43cb-9c88-18b18c31229d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.032365 4985 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.032643 4985 status_manager.go:851] "Failed to get status for pod" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" pod="openshift-marketplace/redhat-marketplace-rl7bj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-rl7bj\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.032822 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.032942 4985 status_manager.go:851] "Failed to get status for pod" podUID="42646a8a-9535-4af7-9fc7-73f495747fb3" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5768cb968-97whc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.033286 4985 status_manager.go:851] "Failed to get status for pod" podUID="6a43b154-65f1-46f6-8417-479bd1464b41" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-75dd5bcc84-vxtrs\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.033483 4985 status_manager.go:851] "Failed to get status for pod" podUID="83b165bc-18cd-43cb-9c88-18b18c31229d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.033669 4985 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.033858 4985 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.034053 4985 status_manager.go:851] "Failed to get status for pod" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" 
pod="openshift-marketplace/redhat-marketplace-rl7bj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-rl7bj\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:23 crc kubenswrapper[4985]: I0125 00:10:23.034227 4985 status_manager.go:851] "Failed to get status for pod" podUID="42646a8a-9535-4af7-9fc7-73f495747fb3" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5768cb968-97whc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:23 crc kubenswrapper[4985]: E0125 00:10:23.134156 4985 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.196:6443: connect: connection refused" interval="400ms" Jan 25 00:10:23 crc kubenswrapper[4985]: E0125 00:10:23.535816 4985 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.196:6443: connect: connection refused" interval="800ms" Jan 25 00:10:24 crc kubenswrapper[4985]: I0125 00:10:24.034848 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"4e1a042ea414b64dd7f7b3b6fe9f794e44b93d2517f1a0deaaa2f8999aaa9436"} Jan 25 00:10:24 crc kubenswrapper[4985]: I0125 00:10:24.035395 4985 status_manager.go:851] "Failed to get status for pod" podUID="83b165bc-18cd-43cb-9c88-18b18c31229d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:24 crc kubenswrapper[4985]: I0125 00:10:24.035622 4985 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:24 crc kubenswrapper[4985]: I0125 00:10:24.035873 4985 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:24 crc kubenswrapper[4985]: I0125 00:10:24.036139 4985 status_manager.go:851] "Failed to get status for pod" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" pod="openshift-marketplace/redhat-marketplace-rl7bj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-rl7bj\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:24 crc kubenswrapper[4985]: I0125 00:10:24.036388 4985 status_manager.go:851] "Failed to get status for pod" podUID="42646a8a-9535-4af7-9fc7-73f495747fb3" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5768cb968-97whc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:24 crc kubenswrapper[4985]: I0125 00:10:24.036623 4985 status_manager.go:851] "Failed to get status for pod" podUID="6a43b154-65f1-46f6-8417-479bd1464b41" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-75dd5bcc84-vxtrs\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:24 crc kubenswrapper[4985]: I0125 00:10:24.038576 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 25 00:10:24 crc kubenswrapper[4985]: E0125 00:10:24.336930 4985 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.196:6443: connect: connection refused" interval="1.6s" Jan 25 00:10:24 crc kubenswrapper[4985]: I0125 00:10:24.378085 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 25 00:10:24 crc kubenswrapper[4985]: I0125 00:10:24.378706 4985 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:24 crc kubenswrapper[4985]: I0125 00:10:24.379177 4985 status_manager.go:851] "Failed to get status for pod" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" pod="openshift-marketplace/redhat-marketplace-rl7bj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-rl7bj\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:24 crc kubenswrapper[4985]: I0125 00:10:24.379638 4985 status_manager.go:851] "Failed to get status for pod" podUID="42646a8a-9535-4af7-9fc7-73f495747fb3" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5768cb968-97whc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:24 crc kubenswrapper[4985]: I0125 00:10:24.379906 4985 status_manager.go:851] "Failed to get status for pod" podUID="6a43b154-65f1-46f6-8417-479bd1464b41" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-75dd5bcc84-vxtrs\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:24 crc kubenswrapper[4985]: I0125 00:10:24.380146 4985 status_manager.go:851] "Failed to get status for pod" podUID="83b165bc-18cd-43cb-9c88-18b18c31229d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:24 crc kubenswrapper[4985]: I0125 00:10:24.481364 4985 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/83b165bc-18cd-43cb-9c88-18b18c31229d-kubelet-dir\") pod \"83b165bc-18cd-43cb-9c88-18b18c31229d\" (UID: \"83b165bc-18cd-43cb-9c88-18b18c31229d\") " Jan 25 00:10:24 crc kubenswrapper[4985]: I0125 00:10:24.481459 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/83b165bc-18cd-43cb-9c88-18b18c31229d-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "83b165bc-18cd-43cb-9c88-18b18c31229d" (UID: "83b165bc-18cd-43cb-9c88-18b18c31229d"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:10:24 crc kubenswrapper[4985]: I0125 00:10:24.481486 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/83b165bc-18cd-43cb-9c88-18b18c31229d-var-lock\") pod \"83b165bc-18cd-43cb-9c88-18b18c31229d\" (UID: \"83b165bc-18cd-43cb-9c88-18b18c31229d\") " Jan 25 00:10:24 crc kubenswrapper[4985]: I0125 00:10:24.481518 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/83b165bc-18cd-43cb-9c88-18b18c31229d-var-lock" (OuterVolumeSpecName: "var-lock") pod "83b165bc-18cd-43cb-9c88-18b18c31229d" (UID: "83b165bc-18cd-43cb-9c88-18b18c31229d"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:10:24 crc kubenswrapper[4985]: I0125 00:10:24.481566 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/83b165bc-18cd-43cb-9c88-18b18c31229d-kube-api-access\") pod \"83b165bc-18cd-43cb-9c88-18b18c31229d\" (UID: \"83b165bc-18cd-43cb-9c88-18b18c31229d\") " Jan 25 00:10:24 crc kubenswrapper[4985]: I0125 00:10:24.482017 4985 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/83b165bc-18cd-43cb-9c88-18b18c31229d-var-lock\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:24 crc kubenswrapper[4985]: I0125 00:10:24.482039 4985 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/83b165bc-18cd-43cb-9c88-18b18c31229d-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:24 crc kubenswrapper[4985]: I0125 00:10:24.487169 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83b165bc-18cd-43cb-9c88-18b18c31229d-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "83b165bc-18cd-43cb-9c88-18b18c31229d" (UID: "83b165bc-18cd-43cb-9c88-18b18c31229d"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:10:24 crc kubenswrapper[4985]: I0125 00:10:24.583561 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/83b165bc-18cd-43cb-9c88-18b18c31229d-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:25 crc kubenswrapper[4985]: I0125 00:10:25.046202 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 25 00:10:25 crc kubenswrapper[4985]: I0125 00:10:25.046266 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"83b165bc-18cd-43cb-9c88-18b18c31229d","Type":"ContainerDied","Data":"0a1f3b6e806b2371a41030ef225c39063b180ad3e8018cbdeb9716d3efa2546f"} Jan 25 00:10:25 crc kubenswrapper[4985]: I0125 00:10:25.046308 4985 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a1f3b6e806b2371a41030ef225c39063b180ad3e8018cbdeb9716d3efa2546f" Jan 25 00:10:25 crc kubenswrapper[4985]: I0125 00:10:25.057518 4985 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:25 crc kubenswrapper[4985]: I0125 00:10:25.057963 4985 status_manager.go:851] "Failed to get status for pod" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" pod="openshift-marketplace/redhat-marketplace-rl7bj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-rl7bj\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:25 crc kubenswrapper[4985]: I0125 00:10:25.058355 4985 status_manager.go:851] "Failed to get status for pod" podUID="42646a8a-9535-4af7-9fc7-73f495747fb3" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5768cb968-97whc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:25 crc kubenswrapper[4985]: I0125 00:10:25.058590 4985 status_manager.go:851] "Failed to get status for pod" podUID="6a43b154-65f1-46f6-8417-479bd1464b41" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-75dd5bcc84-vxtrs\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:25 crc kubenswrapper[4985]: I0125 00:10:25.058816 4985 status_manager.go:851] "Failed to get status for pod" podUID="83b165bc-18cd-43cb-9c88-18b18c31229d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:25 crc kubenswrapper[4985]: I0125 00:10:25.229349 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 25 00:10:25 crc kubenswrapper[4985]: I0125 00:10:25.230435 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:10:25 crc kubenswrapper[4985]: I0125 00:10:25.231068 4985 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:25 crc kubenswrapper[4985]: I0125 00:10:25.231558 4985 status_manager.go:851] "Failed to get status for pod" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" pod="openshift-marketplace/redhat-marketplace-rl7bj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-rl7bj\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:25 crc kubenswrapper[4985]: I0125 00:10:25.231937 4985 status_manager.go:851] "Failed to get status for pod" podUID="42646a8a-9535-4af7-9fc7-73f495747fb3" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5768cb968-97whc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:25 crc kubenswrapper[4985]: I0125 00:10:25.232202 4985 status_manager.go:851] "Failed to get status for pod" podUID="6a43b154-65f1-46f6-8417-479bd1464b41" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-75dd5bcc84-vxtrs\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:25 crc kubenswrapper[4985]: I0125 00:10:25.232472 4985 status_manager.go:851] "Failed to get status for pod" podUID="83b165bc-18cd-43cb-9c88-18b18c31229d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:25 crc kubenswrapper[4985]: I0125 00:10:25.232749 4985 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:25 crc kubenswrapper[4985]: I0125 00:10:25.291100 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 25 00:10:25 crc kubenswrapper[4985]: I0125 00:10:25.291218 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 25 00:10:25 crc kubenswrapper[4985]: I0125 00:10:25.291281 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 25 
00:10:25 crc kubenswrapper[4985]: I0125 00:10:25.291455 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:10:25 crc kubenswrapper[4985]: I0125 00:10:25.291495 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:10:25 crc kubenswrapper[4985]: I0125 00:10:25.291481 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:10:25 crc kubenswrapper[4985]: I0125 00:10:25.291738 4985 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:25 crc kubenswrapper[4985]: I0125 00:10:25.291818 4985 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:25 crc kubenswrapper[4985]: I0125 00:10:25.291831 4985 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:25 crc kubenswrapper[4985]: E0125 00:10:25.937632 4985 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.196:6443: connect: connection refused" interval="3.2s" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.061061 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.062474 4985 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b" exitCode=0 Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.062529 4985 scope.go:117] "RemoveContainer" containerID="0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.062754 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.077726 4985 scope.go:117] "RemoveContainer" containerID="03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.080866 4985 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.081054 4985 status_manager.go:851] "Failed to get status for pod" podUID="83b165bc-18cd-43cb-9c88-18b18c31229d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.081240 4985 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.081398 4985 status_manager.go:851] "Failed to get status for pod" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" pod="openshift-marketplace/redhat-marketplace-rl7bj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-rl7bj\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.081543 4985 status_manager.go:851] "Failed to get status for pod" podUID="42646a8a-9535-4af7-9fc7-73f495747fb3" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5768cb968-97whc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.081682 4985 status_manager.go:851] "Failed to get status for pod" podUID="6a43b154-65f1-46f6-8417-479bd1464b41" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-75dd5bcc84-vxtrs\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.091374 4985 scope.go:117] "RemoveContainer" containerID="fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.107966 4985 scope.go:117] "RemoveContainer" containerID="f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.122864 4985 scope.go:117] "RemoveContainer" containerID="f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.145772 4985 scope.go:117] "RemoveContainer" containerID="94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.165532 4985 scope.go:117] "RemoveContainer" 
containerID="0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178" Jan 25 00:10:26 crc kubenswrapper[4985]: E0125 00:10:26.165921 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\": container with ID starting with 0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178 not found: ID does not exist" containerID="0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.165959 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178"} err="failed to get container status \"0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\": rpc error: code = NotFound desc = could not find container \"0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178\": container with ID starting with 0ff503336ab75761cc408e8445c05e38eb5226913028822c828c778ed61f8178 not found: ID does not exist" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.166021 4985 scope.go:117] "RemoveContainer" containerID="03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f" Jan 25 00:10:26 crc kubenswrapper[4985]: E0125 00:10:26.166450 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\": container with ID starting with 03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f not found: ID does not exist" containerID="03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.166482 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f"} err="failed to get container status \"03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\": rpc error: code = NotFound desc = could not find container \"03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f\": container with ID starting with 03aa46b47a694b9ae126b0d33aa6f8407199bb6ee00ee79d869d46f60a690b5f not found: ID does not exist" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.166499 4985 scope.go:117] "RemoveContainer" containerID="fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651" Jan 25 00:10:26 crc kubenswrapper[4985]: E0125 00:10:26.166916 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\": container with ID starting with fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651 not found: ID does not exist" containerID="fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.166945 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651"} err="failed to get container status \"fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\": rpc error: code = NotFound desc = could not find container \"fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651\": container with ID starting with 
fc5036bb64cd9ebf14fdc048682a08f9862e73b80def1a21edea29cfd1671651 not found: ID does not exist" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.166961 4985 scope.go:117] "RemoveContainer" containerID="f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58" Jan 25 00:10:26 crc kubenswrapper[4985]: E0125 00:10:26.167366 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\": container with ID starting with f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58 not found: ID does not exist" containerID="f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.167461 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58"} err="failed to get container status \"f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\": rpc error: code = NotFound desc = could not find container \"f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58\": container with ID starting with f49002f7a9cb2ac16e8b2563de9878d3f06fc8019bb90c49160c478011b41d58 not found: ID does not exist" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.167481 4985 scope.go:117] "RemoveContainer" containerID="f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b" Jan 25 00:10:26 crc kubenswrapper[4985]: E0125 00:10:26.172894 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\": container with ID starting with f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b not found: ID does not exist" containerID="f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.172924 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b"} err="failed to get container status \"f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\": rpc error: code = NotFound desc = could not find container \"f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b\": container with ID starting with f8c5581add1c7b03be5c1ab651e9fc32666d455e47ec05655ac6e23b1350b55b not found: ID does not exist" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.172941 4985 scope.go:117] "RemoveContainer" containerID="94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99" Jan 25 00:10:26 crc kubenswrapper[4985]: E0125 00:10:26.173260 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\": container with ID starting with 94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99 not found: ID does not exist" containerID="94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.173303 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99"} err="failed to get container status \"94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\": rpc 
error: code = NotFound desc = could not find container \"94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99\": container with ID starting with 94d287cd7b99ab5921f2dd8a2fd479ba3795d605c2678fde4e1fabaa9038eb99 not found: ID does not exist" Jan 25 00:10:26 crc kubenswrapper[4985]: I0125 00:10:26.286155 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Jan 25 00:10:27 crc kubenswrapper[4985]: E0125 00:10:27.512047 4985 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.196:6443: connect: connection refused" event="&Event{ObjectMeta:{redhat-marketplace-rl7bj.188dd0d0924986e4 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:redhat-marketplace-rl7bj,UID:7a26fe5e-9560-455a-a98e-6185e89ee607,APIVersion:v1,ResourceVersion:28452,FieldPath:spec.containers{registry-server},},Reason:Created,Message:Created container registry-server,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-25 00:10:22.852540132 +0000 UTC m=+232.884476405,LastTimestamp:2026-01-25 00:10:22.852540132 +0000 UTC m=+232.884476405,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 25 00:10:27 crc kubenswrapper[4985]: I0125 00:10:27.904392 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rl7bj" Jan 25 00:10:27 crc kubenswrapper[4985]: I0125 00:10:27.904430 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rl7bj" Jan 25 00:10:27 crc kubenswrapper[4985]: I0125 00:10:27.961362 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rl7bj" Jan 25 00:10:27 crc kubenswrapper[4985]: I0125 00:10:27.961928 4985 status_manager.go:851] "Failed to get status for pod" podUID="6a43b154-65f1-46f6-8417-479bd1464b41" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-75dd5bcc84-vxtrs\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:27 crc kubenswrapper[4985]: I0125 00:10:27.962300 4985 status_manager.go:851] "Failed to get status for pod" podUID="83b165bc-18cd-43cb-9c88-18b18c31229d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:27 crc kubenswrapper[4985]: I0125 00:10:27.962689 4985 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:27 crc kubenswrapper[4985]: I0125 00:10:27.963068 4985 status_manager.go:851] "Failed to get status for pod" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" 
pod="openshift-marketplace/redhat-marketplace-rl7bj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-rl7bj\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:27 crc kubenswrapper[4985]: I0125 00:10:27.963515 4985 status_manager.go:851] "Failed to get status for pod" podUID="42646a8a-9535-4af7-9fc7-73f495747fb3" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5768cb968-97whc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:28 crc kubenswrapper[4985]: I0125 00:10:28.133551 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rl7bj" Jan 25 00:10:28 crc kubenswrapper[4985]: I0125 00:10:28.134194 4985 status_manager.go:851] "Failed to get status for pod" podUID="6a43b154-65f1-46f6-8417-479bd1464b41" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-75dd5bcc84-vxtrs\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:28 crc kubenswrapper[4985]: I0125 00:10:28.134925 4985 status_manager.go:851] "Failed to get status for pod" podUID="83b165bc-18cd-43cb-9c88-18b18c31229d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:28 crc kubenswrapper[4985]: I0125 00:10:28.135399 4985 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:28 crc kubenswrapper[4985]: I0125 00:10:28.135713 4985 status_manager.go:851] "Failed to get status for pod" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" pod="openshift-marketplace/redhat-marketplace-rl7bj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-rl7bj\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:28 crc kubenswrapper[4985]: I0125 00:10:28.135957 4985 status_manager.go:851] "Failed to get status for pod" podUID="42646a8a-9535-4af7-9fc7-73f495747fb3" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5768cb968-97whc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:28 crc kubenswrapper[4985]: E0125 00:10:28.322663 4985 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.196:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" volumeName="registry-storage" Jan 25 00:10:29 crc kubenswrapper[4985]: E0125 
00:10:29.138401 4985 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.196:6443: connect: connection refused" interval="6.4s" Jan 25 00:10:30 crc kubenswrapper[4985]: I0125 00:10:30.279638 4985 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:30 crc kubenswrapper[4985]: I0125 00:10:30.280444 4985 status_manager.go:851] "Failed to get status for pod" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" pod="openshift-marketplace/redhat-marketplace-rl7bj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-rl7bj\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:30 crc kubenswrapper[4985]: I0125 00:10:30.280970 4985 status_manager.go:851] "Failed to get status for pod" podUID="42646a8a-9535-4af7-9fc7-73f495747fb3" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5768cb968-97whc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:30 crc kubenswrapper[4985]: I0125 00:10:30.281541 4985 status_manager.go:851] "Failed to get status for pod" podUID="6a43b154-65f1-46f6-8417-479bd1464b41" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-75dd5bcc84-vxtrs\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:30 crc kubenswrapper[4985]: I0125 00:10:30.282007 4985 status_manager.go:851] "Failed to get status for pod" podUID="83b165bc-18cd-43cb-9c88-18b18c31229d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:35 crc kubenswrapper[4985]: I0125 00:10:35.274199 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:10:35 crc kubenswrapper[4985]: I0125 00:10:35.275511 4985 status_manager.go:851] "Failed to get status for pod" podUID="83b165bc-18cd-43cb-9c88-18b18c31229d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:35 crc kubenswrapper[4985]: I0125 00:10:35.276086 4985 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:35 crc kubenswrapper[4985]: I0125 00:10:35.276674 4985 status_manager.go:851] "Failed to get status for pod" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" pod="openshift-marketplace/redhat-marketplace-rl7bj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-rl7bj\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:35 crc kubenswrapper[4985]: I0125 00:10:35.277066 4985 status_manager.go:851] "Failed to get status for pod" podUID="42646a8a-9535-4af7-9fc7-73f495747fb3" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5768cb968-97whc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:35 crc kubenswrapper[4985]: I0125 00:10:35.277508 4985 status_manager.go:851] "Failed to get status for pod" podUID="6a43b154-65f1-46f6-8417-479bd1464b41" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-75dd5bcc84-vxtrs\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:35 crc kubenswrapper[4985]: I0125 00:10:35.294464 4985 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4d2e3844-5209-406b-8b7d-90c980e6830d" Jan 25 00:10:35 crc kubenswrapper[4985]: I0125 00:10:35.294490 4985 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4d2e3844-5209-406b-8b7d-90c980e6830d" Jan 25 00:10:35 crc kubenswrapper[4985]: E0125 00:10:35.294736 4985 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:10:35 crc kubenswrapper[4985]: I0125 00:10:35.295279 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:10:35 crc kubenswrapper[4985]: W0125 00:10:35.316912 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-340688398ff4a7df59bf5e78b8cdcf8887e1e915c42082f5a0f8c13c146ca8f8 WatchSource:0}: Error finding container 340688398ff4a7df59bf5e78b8cdcf8887e1e915c42082f5a0f8c13c146ca8f8: Status 404 returned error can't find the container with id 340688398ff4a7df59bf5e78b8cdcf8887e1e915c42082f5a0f8c13c146ca8f8 Jan 25 00:10:35 crc kubenswrapper[4985]: E0125 00:10:35.538925 4985 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.196:6443: connect: connection refused" interval="7s" Jan 25 00:10:35 crc kubenswrapper[4985]: I0125 00:10:35.781774 4985 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Liveness probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Jan 25 00:10:35 crc kubenswrapper[4985]: I0125 00:10:35.781854 4985 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Jan 25 00:10:36 crc kubenswrapper[4985]: I0125 00:10:36.133984 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"340688398ff4a7df59bf5e78b8cdcf8887e1e915c42082f5a0f8c13c146ca8f8"} Jan 25 00:10:37 crc kubenswrapper[4985]: E0125 00:10:37.513413 4985 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/events\": dial tcp 38.102.83.196:6443: connect: connection refused" event="&Event{ObjectMeta:{redhat-marketplace-rl7bj.188dd0d0924986e4 openshift-marketplace 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-marketplace,Name:redhat-marketplace-rl7bj,UID:7a26fe5e-9560-455a-a98e-6185e89ee607,APIVersion:v1,ResourceVersion:28452,FieldPath:spec.containers{registry-server},},Reason:Created,Message:Created container registry-server,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-25 00:10:22.852540132 +0000 UTC m=+232.884476405,LastTimestamp:2026-01-25 00:10:22.852540132 +0000 UTC m=+232.884476405,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 25 00:10:40 crc kubenswrapper[4985]: I0125 00:10:40.129368 4985 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Readiness probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Jan 25 00:10:40 crc kubenswrapper[4985]: I0125 00:10:40.130882 4985 prober.go:107] "Probe failed" 
probeType="Readiness" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Jan 25 00:10:40 crc kubenswrapper[4985]: I0125 00:10:40.278577 4985 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:40 crc kubenswrapper[4985]: I0125 00:10:40.278987 4985 status_manager.go:851] "Failed to get status for pod" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" pod="openshift-marketplace/redhat-marketplace-rl7bj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-rl7bj\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:40 crc kubenswrapper[4985]: I0125 00:10:40.279336 4985 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:40 crc kubenswrapper[4985]: I0125 00:10:40.279629 4985 status_manager.go:851] "Failed to get status for pod" podUID="42646a8a-9535-4af7-9fc7-73f495747fb3" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5768cb968-97whc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:40 crc kubenswrapper[4985]: I0125 00:10:40.279967 4985 status_manager.go:851] "Failed to get status for pod" podUID="6a43b154-65f1-46f6-8417-479bd1464b41" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-75dd5bcc84-vxtrs\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:40 crc kubenswrapper[4985]: I0125 00:10:40.280358 4985 status_manager.go:851] "Failed to get status for pod" podUID="83b165bc-18cd-43cb-9c88-18b18c31229d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:41 crc kubenswrapper[4985]: I0125 00:10:41.168225 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 25 00:10:41 crc kubenswrapper[4985]: I0125 00:10:41.168550 4985 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df" exitCode=1 Jan 25 00:10:41 crc kubenswrapper[4985]: I0125 00:10:41.168654 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df"} Jan 25 00:10:41 crc kubenswrapper[4985]: I0125 00:10:41.169595 4985 scope.go:117] "RemoveContainer" containerID="d84a1e822ac2c211cee0bf72d7ec75ef8555e62f15ef863db3381ad639e0f7df" Jan 25 00:10:41 crc kubenswrapper[4985]: I0125 00:10:41.169652 4985 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:41 crc kubenswrapper[4985]: I0125 00:10:41.170305 4985 status_manager.go:851] "Failed to get status for pod" podUID="83b165bc-18cd-43cb-9c88-18b18c31229d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:41 crc kubenswrapper[4985]: I0125 00:10:41.170652 4985 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:41 crc kubenswrapper[4985]: I0125 00:10:41.170841 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"3cc58b9866de0cc08e97c2bbc67e9426d33c20c15ca93ef4c85efeaad8dcf6ed"} Jan 25 00:10:41 crc kubenswrapper[4985]: I0125 00:10:41.171345 4985 status_manager.go:851] "Failed to get status for pod" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" pod="openshift-marketplace/redhat-marketplace-rl7bj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-rl7bj\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:41 crc kubenswrapper[4985]: I0125 00:10:41.171854 4985 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:41 crc kubenswrapper[4985]: I0125 00:10:41.172311 4985 status_manager.go:851] "Failed to get status for pod" podUID="42646a8a-9535-4af7-9fc7-73f495747fb3" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5768cb968-97whc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:41 crc kubenswrapper[4985]: I0125 00:10:41.172609 4985 status_manager.go:851] "Failed to get status for pod" podUID="6a43b154-65f1-46f6-8417-479bd1464b41" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-75dd5bcc84-vxtrs\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:42 crc 
kubenswrapper[4985]: I0125 00:10:42.176821 4985 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="3cc58b9866de0cc08e97c2bbc67e9426d33c20c15ca93ef4c85efeaad8dcf6ed" exitCode=0 Jan 25 00:10:42 crc kubenswrapper[4985]: I0125 00:10:42.177060 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"3cc58b9866de0cc08e97c2bbc67e9426d33c20c15ca93ef4c85efeaad8dcf6ed"} Jan 25 00:10:42 crc kubenswrapper[4985]: I0125 00:10:42.177325 4985 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4d2e3844-5209-406b-8b7d-90c980e6830d" Jan 25 00:10:42 crc kubenswrapper[4985]: I0125 00:10:42.177339 4985 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4d2e3844-5209-406b-8b7d-90c980e6830d" Jan 25 00:10:42 crc kubenswrapper[4985]: I0125 00:10:42.177919 4985 status_manager.go:851] "Failed to get status for pod" podUID="42646a8a-9535-4af7-9fc7-73f495747fb3" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5768cb968-97whc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:42 crc kubenswrapper[4985]: I0125 00:10:42.178121 4985 status_manager.go:851] "Failed to get status for pod" podUID="6a43b154-65f1-46f6-8417-479bd1464b41" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-75dd5bcc84-vxtrs\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:42 crc kubenswrapper[4985]: E0125 00:10:42.178059 4985 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:10:42 crc kubenswrapper[4985]: I0125 00:10:42.178336 4985 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:42 crc kubenswrapper[4985]: I0125 00:10:42.178605 4985 status_manager.go:851] "Failed to get status for pod" podUID="83b165bc-18cd-43cb-9c88-18b18c31229d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:42 crc kubenswrapper[4985]: I0125 00:10:42.178906 4985 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:42 crc kubenswrapper[4985]: I0125 00:10:42.179537 4985 status_manager.go:851] "Failed to get status for pod" 
podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" pod="openshift-marketplace/redhat-marketplace-rl7bj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-rl7bj\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:42 crc kubenswrapper[4985]: I0125 00:10:42.179924 4985 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:42 crc kubenswrapper[4985]: I0125 00:10:42.182352 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 25 00:10:42 crc kubenswrapper[4985]: I0125 00:10:42.182486 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8eba319211a285aae1083f4022883f5bbb6c251203e58f9f31a9fd1bd49ef06a"} Jan 25 00:10:42 crc kubenswrapper[4985]: I0125 00:10:42.183647 4985 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:42 crc kubenswrapper[4985]: I0125 00:10:42.184060 4985 status_manager.go:851] "Failed to get status for pod" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" pod="openshift-marketplace/redhat-marketplace-rl7bj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-rl7bj\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:42 crc kubenswrapper[4985]: I0125 00:10:42.184544 4985 status_manager.go:851] "Failed to get status for pod" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:42 crc kubenswrapper[4985]: I0125 00:10:42.184824 4985 status_manager.go:851] "Failed to get status for pod" podUID="42646a8a-9535-4af7-9fc7-73f495747fb3" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/pods/route-controller-manager-5768cb968-97whc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:42 crc kubenswrapper[4985]: I0125 00:10:42.185047 4985 status_manager.go:851] "Failed to get status for pod" podUID="6a43b154-65f1-46f6-8417-479bd1464b41" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-controller-manager/pods/controller-manager-75dd5bcc84-vxtrs\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:42 crc kubenswrapper[4985]: I0125 00:10:42.185305 4985 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:42 crc kubenswrapper[4985]: I0125 00:10:42.185503 4985 status_manager.go:851] "Failed to get status for pod" podUID="83b165bc-18cd-43cb-9c88-18b18c31229d" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.196:6443: connect: connection refused" Jan 25 00:10:42 crc kubenswrapper[4985]: I0125 00:10:42.425961 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" podUID="7e68708c-6c3b-43d8-8005-1e144e5f8ad1" containerName="oauth-openshift" containerID="cri-o://31f6b415083622632b4a278fc4a2c97a699ae2d3af04635ad1ebe492150aa701" gracePeriod=15 Jan 25 00:10:42 crc kubenswrapper[4985]: E0125 00:10:42.540525 4985 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.196:6443: connect: connection refused" interval="7s" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.194863 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"61b055ae52012ebed595394500a763b7606c0c58cdc1b526c541bbbbfa8835bd"} Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.197141 4985 generic.go:334] "Generic (PLEG): container finished" podID="7e68708c-6c3b-43d8-8005-1e144e5f8ad1" containerID="31f6b415083622632b4a278fc4a2c97a699ae2d3af04635ad1ebe492150aa701" exitCode=0 Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.197212 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" event={"ID":"7e68708c-6c3b-43d8-8005-1e144e5f8ad1","Type":"ContainerDied","Data":"31f6b415083622632b4a278fc4a2c97a699ae2d3af04635ad1ebe492150aa701"} Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.451547 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.573926 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-serving-cert\") pod \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.573971 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-service-ca\") pod \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.573993 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c5zwc\" (UniqueName: \"kubernetes.io/projected/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-kube-api-access-c5zwc\") pod \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.574049 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-user-idp-0-file-data\") pod \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.574071 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-session\") pod \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.574091 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-audit-policies\") pod \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.574131 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-user-template-login\") pod \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.574150 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-cliconfig\") pod \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.574180 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-user-template-error\") pod \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 
00:10:43.574208 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-audit-dir\") pod \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.574236 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-user-template-provider-selection\") pod \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.574281 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-ocp-branding-template\") pod \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.574302 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-router-certs\") pod \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.574321 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-trusted-ca-bundle\") pod \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\" (UID: \"7e68708c-6c3b-43d8-8005-1e144e5f8ad1\") " Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.575286 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "7e68708c-6c3b-43d8-8005-1e144e5f8ad1" (UID: "7e68708c-6c3b-43d8-8005-1e144e5f8ad1"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.575304 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "7e68708c-6c3b-43d8-8005-1e144e5f8ad1" (UID: "7e68708c-6c3b-43d8-8005-1e144e5f8ad1"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.575328 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "7e68708c-6c3b-43d8-8005-1e144e5f8ad1" (UID: "7e68708c-6c3b-43d8-8005-1e144e5f8ad1"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.577189 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "7e68708c-6c3b-43d8-8005-1e144e5f8ad1" (UID: "7e68708c-6c3b-43d8-8005-1e144e5f8ad1"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.577236 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "7e68708c-6c3b-43d8-8005-1e144e5f8ad1" (UID: "7e68708c-6c3b-43d8-8005-1e144e5f8ad1"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.580011 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "7e68708c-6c3b-43d8-8005-1e144e5f8ad1" (UID: "7e68708c-6c3b-43d8-8005-1e144e5f8ad1"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.580510 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "7e68708c-6c3b-43d8-8005-1e144e5f8ad1" (UID: "7e68708c-6c3b-43d8-8005-1e144e5f8ad1"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.580621 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "7e68708c-6c3b-43d8-8005-1e144e5f8ad1" (UID: "7e68708c-6c3b-43d8-8005-1e144e5f8ad1"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.580830 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "7e68708c-6c3b-43d8-8005-1e144e5f8ad1" (UID: "7e68708c-6c3b-43d8-8005-1e144e5f8ad1"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.581397 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-kube-api-access-c5zwc" (OuterVolumeSpecName: "kube-api-access-c5zwc") pod "7e68708c-6c3b-43d8-8005-1e144e5f8ad1" (UID: "7e68708c-6c3b-43d8-8005-1e144e5f8ad1"). InnerVolumeSpecName "kube-api-access-c5zwc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.582868 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "7e68708c-6c3b-43d8-8005-1e144e5f8ad1" (UID: "7e68708c-6c3b-43d8-8005-1e144e5f8ad1"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.585283 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "7e68708c-6c3b-43d8-8005-1e144e5f8ad1" (UID: "7e68708c-6c3b-43d8-8005-1e144e5f8ad1"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.587926 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "7e68708c-6c3b-43d8-8005-1e144e5f8ad1" (UID: "7e68708c-6c3b-43d8-8005-1e144e5f8ad1"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.589487 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "7e68708c-6c3b-43d8-8005-1e144e5f8ad1" (UID: "7e68708c-6c3b-43d8-8005-1e144e5f8ad1"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.675598 4985 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.675637 4985 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.675653 4985 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.675665 4985 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.675678 4985 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.675690 4985 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.675706 4985 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.675719 4985 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.675735 4985 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.675747 4985 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.675760 4985 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.675771 4985 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.675781 4985 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:43 crc kubenswrapper[4985]: I0125 00:10:43.675791 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c5zwc\" (UniqueName: \"kubernetes.io/projected/7e68708c-6c3b-43d8-8005-1e144e5f8ad1-kube-api-access-c5zwc\") on node \"crc\" DevicePath \"\"" Jan 25 00:10:44 crc kubenswrapper[4985]: I0125 00:10:44.207716 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"0eae24e31f40e559ea397261a37b6b7667c01275ababdf746b4708a9b70ce2de"} Jan 25 00:10:44 crc kubenswrapper[4985]: I0125 00:10:44.210037 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" event={"ID":"7e68708c-6c3b-43d8-8005-1e144e5f8ad1","Type":"ContainerDied","Data":"817342bb8ac2ef285c89b7a1e6d12f8ca5b218b906a719477c0518eab89b47de"} Jan 25 00:10:44 crc kubenswrapper[4985]: I0125 00:10:44.210134 4985 scope.go:117] "RemoveContainer" containerID="31f6b415083622632b4a278fc4a2c97a699ae2d3af04635ad1ebe492150aa701" Jan 25 00:10:44 crc kubenswrapper[4985]: I0125 00:10:44.210117 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-29vvw" Jan 25 00:10:45 crc kubenswrapper[4985]: I0125 00:10:45.223531 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"d1d6f7d034d44fce9030f5b6e9c7dafb656df75fb40c4068e5e32f8f61ad7660"} Jan 25 00:10:45 crc kubenswrapper[4985]: I0125 00:10:45.224400 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b1bf73abde0734bce00546aeb1bbeac1b44ce331ee2150ee44e0dd06429d860c"} Jan 25 00:10:46 crc kubenswrapper[4985]: I0125 00:10:46.233406 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"a0c559a325b989b82ced9889515c8f627aff639377fc5dbb188a61ffaf5bcf4f"} Jan 25 00:10:46 crc kubenswrapper[4985]: I0125 00:10:46.233782 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:10:46 crc kubenswrapper[4985]: I0125 00:10:46.233695 4985 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4d2e3844-5209-406b-8b7d-90c980e6830d" Jan 25 00:10:46 crc kubenswrapper[4985]: I0125 00:10:46.233806 4985 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4d2e3844-5209-406b-8b7d-90c980e6830d" Jan 25 00:10:46 crc kubenswrapper[4985]: I0125 00:10:46.242422 4985 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:10:47 crc kubenswrapper[4985]: I0125 00:10:47.243919 4985 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_71bb4a3aecc4ba5b26c4b7318770ce13/kube-apiserver-check-endpoints/0.log" Jan 25 00:10:47 crc kubenswrapper[4985]: I0125 00:10:47.246950 4985 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="a0c559a325b989b82ced9889515c8f627aff639377fc5dbb188a61ffaf5bcf4f" exitCode=255 Jan 25 00:10:47 crc kubenswrapper[4985]: I0125 00:10:47.246993 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"a0c559a325b989b82ced9889515c8f627aff639377fc5dbb188a61ffaf5bcf4f"} Jan 25 00:10:47 crc kubenswrapper[4985]: I0125 00:10:47.247684 4985 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4d2e3844-5209-406b-8b7d-90c980e6830d" Jan 25 00:10:47 crc kubenswrapper[4985]: I0125 00:10:47.247742 4985 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4d2e3844-5209-406b-8b7d-90c980e6830d" Jan 25 00:10:47 crc kubenswrapper[4985]: I0125 00:10:47.252970 4985 scope.go:117] "RemoveContainer" containerID="a0c559a325b989b82ced9889515c8f627aff639377fc5dbb188a61ffaf5bcf4f" Jan 25 00:10:47 crc kubenswrapper[4985]: I0125 00:10:47.423476 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 25 00:10:47 crc kubenswrapper[4985]: I0125 00:10:47.430866 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 25 00:10:48 crc kubenswrapper[4985]: I0125 00:10:48.256885 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_71bb4a3aecc4ba5b26c4b7318770ce13/kube-apiserver-check-endpoints/0.log" Jan 25 00:10:48 crc kubenswrapper[4985]: I0125 00:10:48.260283 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"25806b5e06b6c995926746de9b822fc697840741d1b268efa09692219ae39235"} Jan 25 00:10:48 crc kubenswrapper[4985]: I0125 00:10:48.260504 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 25 00:10:48 crc kubenswrapper[4985]: I0125 00:10:48.260964 4985 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4d2e3844-5209-406b-8b7d-90c980e6830d" Jan 25 00:10:48 crc kubenswrapper[4985]: I0125 00:10:48.261001 4985 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4d2e3844-5209-406b-8b7d-90c980e6830d" Jan 25 00:10:49 crc kubenswrapper[4985]: I0125 00:10:49.269418 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:10:49 crc kubenswrapper[4985]: I0125 00:10:49.269715 4985 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4d2e3844-5209-406b-8b7d-90c980e6830d" Jan 25 00:10:49 crc kubenswrapper[4985]: I0125 00:10:49.269753 4985 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4d2e3844-5209-406b-8b7d-90c980e6830d" Jan 25 00:10:49 crc kubenswrapper[4985]: I0125 
00:10:49.651369 4985 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="d122494d-9a76-4ce2-bfd7-2f8354b52ac7" Jan 25 00:10:50 crc kubenswrapper[4985]: I0125 00:10:50.288923 4985 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4d2e3844-5209-406b-8b7d-90c980e6830d" Jan 25 00:10:50 crc kubenswrapper[4985]: I0125 00:10:50.288966 4985 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4d2e3844-5209-406b-8b7d-90c980e6830d" Jan 25 00:10:50 crc kubenswrapper[4985]: I0125 00:10:50.296334 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:10:50 crc kubenswrapper[4985]: I0125 00:10:50.296523 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:10:50 crc kubenswrapper[4985]: I0125 00:10:50.306023 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:10:50 crc kubenswrapper[4985]: I0125 00:10:50.321984 4985 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="d122494d-9a76-4ce2-bfd7-2f8354b52ac7" Jan 25 00:10:51 crc kubenswrapper[4985]: I0125 00:10:51.284370 4985 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4d2e3844-5209-406b-8b7d-90c980e6830d" Jan 25 00:10:51 crc kubenswrapper[4985]: I0125 00:10:51.285323 4985 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4d2e3844-5209-406b-8b7d-90c980e6830d" Jan 25 00:10:51 crc kubenswrapper[4985]: I0125 00:10:51.292531 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:10:51 crc kubenswrapper[4985]: I0125 00:10:51.292551 4985 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="d122494d-9a76-4ce2-bfd7-2f8354b52ac7" Jan 25 00:10:52 crc kubenswrapper[4985]: I0125 00:10:52.289964 4985 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4d2e3844-5209-406b-8b7d-90c980e6830d" Jan 25 00:10:52 crc kubenswrapper[4985]: I0125 00:10:52.289999 4985 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4d2e3844-5209-406b-8b7d-90c980e6830d" Jan 25 00:10:52 crc kubenswrapper[4985]: I0125 00:10:52.294542 4985 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="d122494d-9a76-4ce2-bfd7-2f8354b52ac7" Jan 25 00:10:59 crc kubenswrapper[4985]: I0125 00:10:59.292004 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 25 00:11:00 crc kubenswrapper[4985]: I0125 00:11:00.134867 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 25 00:11:00 crc kubenswrapper[4985]: 
I0125 00:11:00.156237 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 25 00:11:00 crc kubenswrapper[4985]: I0125 00:11:00.454405 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 25 00:11:01 crc kubenswrapper[4985]: I0125 00:11:01.311074 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 25 00:11:01 crc kubenswrapper[4985]: I0125 00:11:01.483821 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 25 00:11:02 crc kubenswrapper[4985]: I0125 00:11:02.361341 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 25 00:11:02 crc kubenswrapper[4985]: I0125 00:11:02.446329 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 25 00:11:02 crc kubenswrapper[4985]: I0125 00:11:02.560908 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 25 00:11:02 crc kubenswrapper[4985]: I0125 00:11:02.754704 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 25 00:11:02 crc kubenswrapper[4985]: I0125 00:11:02.777684 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 25 00:11:02 crc kubenswrapper[4985]: I0125 00:11:02.840154 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 25 00:11:02 crc kubenswrapper[4985]: I0125 00:11:02.840159 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 25 00:11:03 crc kubenswrapper[4985]: I0125 00:11:03.030322 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 25 00:11:03 crc kubenswrapper[4985]: I0125 00:11:03.137973 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 25 00:11:03 crc kubenswrapper[4985]: I0125 00:11:03.248144 4985 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 25 00:11:03 crc kubenswrapper[4985]: I0125 00:11:03.265447 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 25 00:11:03 crc kubenswrapper[4985]: I0125 00:11:03.310660 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 25 00:11:03 crc kubenswrapper[4985]: I0125 00:11:03.504718 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 25 00:11:03 crc kubenswrapper[4985]: I0125 00:11:03.707164 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 25 00:11:03 crc kubenswrapper[4985]: I0125 00:11:03.744513 4985 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-multus"/"kube-root-ca.crt" Jan 25 00:11:03 crc kubenswrapper[4985]: I0125 00:11:03.755326 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 25 00:11:03 crc kubenswrapper[4985]: I0125 00:11:03.767735 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 25 00:11:03 crc kubenswrapper[4985]: I0125 00:11:03.861680 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 25 00:11:03 crc kubenswrapper[4985]: I0125 00:11:03.874436 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 25 00:11:04 crc kubenswrapper[4985]: I0125 00:11:04.021016 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 25 00:11:04 crc kubenswrapper[4985]: I0125 00:11:04.135536 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 25 00:11:04 crc kubenswrapper[4985]: I0125 00:11:04.212962 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 25 00:11:04 crc kubenswrapper[4985]: I0125 00:11:04.226467 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 25 00:11:04 crc kubenswrapper[4985]: I0125 00:11:04.686686 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 25 00:11:04 crc kubenswrapper[4985]: I0125 00:11:04.720795 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 25 00:11:04 crc kubenswrapper[4985]: I0125 00:11:04.841705 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 25 00:11:04 crc kubenswrapper[4985]: I0125 00:11:04.881284 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 25 00:11:05 crc kubenswrapper[4985]: I0125 00:11:05.275363 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 25 00:11:05 crc kubenswrapper[4985]: I0125 00:11:05.277741 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 25 00:11:05 crc kubenswrapper[4985]: I0125 00:11:05.866516 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 25 00:11:05 crc kubenswrapper[4985]: I0125 00:11:05.937132 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 25 00:11:05 crc kubenswrapper[4985]: I0125 00:11:05.980316 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 25 00:11:05 crc kubenswrapper[4985]: I0125 00:11:05.989865 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 25 00:11:06 crc kubenswrapper[4985]: I0125 00:11:06.115655 4985 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 25 00:11:06 crc kubenswrapper[4985]: I0125 00:11:06.158568 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 25 00:11:06 crc kubenswrapper[4985]: I0125 00:11:06.248075 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 25 00:11:06 crc kubenswrapper[4985]: I0125 00:11:06.264335 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 25 00:11:06 crc kubenswrapper[4985]: I0125 00:11:06.304330 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 25 00:11:06 crc kubenswrapper[4985]: I0125 00:11:06.579156 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 25 00:11:06 crc kubenswrapper[4985]: I0125 00:11:06.597865 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 25 00:11:06 crc kubenswrapper[4985]: I0125 00:11:06.626446 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 25 00:11:06 crc kubenswrapper[4985]: I0125 00:11:06.656258 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 25 00:11:06 crc kubenswrapper[4985]: I0125 00:11:06.740379 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 25 00:11:06 crc kubenswrapper[4985]: I0125 00:11:06.919441 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 25 00:11:07 crc kubenswrapper[4985]: I0125 00:11:07.125829 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 25 00:11:07 crc kubenswrapper[4985]: I0125 00:11:07.210187 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 25 00:11:07 crc kubenswrapper[4985]: I0125 00:11:07.257312 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 25 00:11:07 crc kubenswrapper[4985]: I0125 00:11:07.264753 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 25 00:11:07 crc kubenswrapper[4985]: I0125 00:11:07.348255 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 25 00:11:07 crc kubenswrapper[4985]: I0125 00:11:07.806631 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 25 00:11:08 crc kubenswrapper[4985]: I0125 00:11:08.069645 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 25 00:11:08 crc kubenswrapper[4985]: I0125 00:11:08.151502 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 25 00:11:08 crc kubenswrapper[4985]: I0125 00:11:08.259783 4985 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 25 00:11:08 crc kubenswrapper[4985]: I0125 00:11:08.269130 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 25 00:11:08 crc kubenswrapper[4985]: I0125 00:11:08.323385 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 25 00:11:08 crc kubenswrapper[4985]: I0125 00:11:08.403969 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 25 00:11:08 crc kubenswrapper[4985]: I0125 00:11:08.416320 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 25 00:11:08 crc kubenswrapper[4985]: I0125 00:11:08.454153 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 25 00:11:08 crc kubenswrapper[4985]: I0125 00:11:08.605675 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 25 00:11:08 crc kubenswrapper[4985]: I0125 00:11:08.611964 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 25 00:11:08 crc kubenswrapper[4985]: I0125 00:11:08.779171 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 25 00:11:08 crc kubenswrapper[4985]: I0125 00:11:08.878220 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 25 00:11:08 crc kubenswrapper[4985]: I0125 00:11:08.929384 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 25 00:11:08 crc kubenswrapper[4985]: I0125 00:11:08.931272 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 25 00:11:08 crc kubenswrapper[4985]: I0125 00:11:08.968940 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 25 00:11:09 crc kubenswrapper[4985]: I0125 00:11:09.072216 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 25 00:11:09 crc kubenswrapper[4985]: I0125 00:11:09.141894 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 25 00:11:09 crc kubenswrapper[4985]: I0125 00:11:09.220972 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 25 00:11:09 crc kubenswrapper[4985]: I0125 00:11:09.355981 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 25 00:11:09 crc kubenswrapper[4985]: I0125 00:11:09.580709 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 25 00:11:09 crc kubenswrapper[4985]: I0125 00:11:09.639741 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 25 00:11:09 crc kubenswrapper[4985]: I0125 
00:11:09.865922 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 25 00:11:09 crc kubenswrapper[4985]: I0125 00:11:09.918187 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 25 00:11:10 crc kubenswrapper[4985]: I0125 00:11:10.001864 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 25 00:11:10 crc kubenswrapper[4985]: I0125 00:11:10.065288 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 25 00:11:10 crc kubenswrapper[4985]: I0125 00:11:10.069865 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 25 00:11:10 crc kubenswrapper[4985]: I0125 00:11:10.124633 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 25 00:11:10 crc kubenswrapper[4985]: I0125 00:11:10.228918 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 25 00:11:10 crc kubenswrapper[4985]: I0125 00:11:10.344449 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 25 00:11:10 crc kubenswrapper[4985]: I0125 00:11:10.467043 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 25 00:11:10 crc kubenswrapper[4985]: I0125 00:11:10.515676 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 25 00:11:10 crc kubenswrapper[4985]: I0125 00:11:10.706557 4985 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 25 00:11:10 crc kubenswrapper[4985]: I0125 00:11:10.707642 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=48.707630505 podStartE2EDuration="48.707630505s" podCreationTimestamp="2026-01-25 00:10:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:10:49.58629121 +0000 UTC m=+259.618227483" watchObservedRunningTime="2026-01-25 00:11:10.707630505 +0000 UTC m=+280.739566778" Jan 25 00:11:10 crc kubenswrapper[4985]: I0125 00:11:10.708419 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rl7bj" podStartSLOduration=51.650997733 podStartE2EDuration="2m13.708415505s" podCreationTimestamp="2026-01-25 00:08:57 +0000 UTC" firstStartedPulling="2026-01-25 00:08:59.968782132 +0000 UTC m=+150.000718405" lastFinishedPulling="2026-01-25 00:10:22.026199904 +0000 UTC m=+232.058136177" observedRunningTime="2026-01-25 00:10:49.64890483 +0000 UTC m=+259.680841103" watchObservedRunningTime="2026-01-25 00:11:10.708415505 +0000 UTC m=+280.740351778" Jan 25 00:11:10 crc kubenswrapper[4985]: I0125 00:11:10.710044 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-authentication/oauth-openshift-558db77b4-29vvw"] Jan 25 00:11:10 crc kubenswrapper[4985]: I0125 00:11:10.710089 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] 
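
The pod_startup_latency_tracker entries above are consistent with plain timestamp arithmetic: podStartE2EDuration matches watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration matches that value minus the image-pull window (lastFinishedPulling minus firstStartedPulling) when a pull actually happened. The Go sketch below reproduces the redhat-marketplace-rl7bj figures from the timestamps logged above; it only illustrates that arithmetic under this reading, it is not kubelet code, and the mustParse helper and layout string are invented for the example.

package main

import (
	"fmt"
	"time"
)

// mustParse is a helper for this example only; the layout matches the
// "2026-01-25 00:08:57 +0000 UTC" wall-clock form used in the log above
// (the trailing monotonic "m=+..." suffix is dropped).
func mustParse(s string) time.Time {
	t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	// Timestamps copied from the redhat-marketplace-rl7bj entry above.
	created := mustParse("2026-01-25 00:08:57 +0000 UTC")                 // podCreationTimestamp
	observed := mustParse("2026-01-25 00:11:10.708415505 +0000 UTC")      // watchObservedRunningTime
	pullStart := mustParse("2026-01-25 00:08:59.968782132 +0000 UTC")     // firstStartedPulling
	pullEnd := mustParse("2026-01-25 00:10:22.026199904 +0000 UTC")       // lastFinishedPulling

	e2e := observed.Sub(created)        // podStartE2EDuration
	slo := e2e - pullEnd.Sub(pullStart) // podStartSLOduration (pull window excluded)
	fmt.Println(e2e, slo)               // 2m13.708415505s 51.650997733s
}

Run as-is it prints "2m13.708415505s 51.650997733s", matching the podStartE2EDuration and podStartSLOduration reported above; for kube-apiserver-startup-monitor-crc the pull timestamps are zero values, so the two durations coincide.
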
Jan 25 00:11:10 crc kubenswrapper[4985]: I0125 00:11:10.710500 4985 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4d2e3844-5209-406b-8b7d-90c980e6830d" Jan 25 00:11:10 crc kubenswrapper[4985]: I0125 00:11:10.710531 4985 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="4d2e3844-5209-406b-8b7d-90c980e6830d" Jan 25 00:11:10 crc kubenswrapper[4985]: I0125 00:11:10.716398 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 25 00:11:10 crc kubenswrapper[4985]: I0125 00:11:10.726569 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=24.726547938 podStartE2EDuration="24.726547938s" podCreationTimestamp="2026-01-25 00:10:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:11:10.723873078 +0000 UTC m=+280.755809361" watchObservedRunningTime="2026-01-25 00:11:10.726547938 +0000 UTC m=+280.758484211" Jan 25 00:11:10 crc kubenswrapper[4985]: I0125 00:11:10.950585 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 25 00:11:10 crc kubenswrapper[4985]: I0125 00:11:10.976640 4985 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 25 00:11:10 crc kubenswrapper[4985]: I0125 00:11:10.976898 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://4e1a042ea414b64dd7f7b3b6fe9f794e44b93d2517f1a0deaaa2f8999aaa9436" gracePeriod=5 Jan 25 00:11:11 crc kubenswrapper[4985]: I0125 00:11:11.097310 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 25 00:11:11 crc kubenswrapper[4985]: I0125 00:11:11.226599 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 25 00:11:11 crc kubenswrapper[4985]: I0125 00:11:11.239181 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 25 00:11:11 crc kubenswrapper[4985]: I0125 00:11:11.429067 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 25 00:11:11 crc kubenswrapper[4985]: I0125 00:11:11.614565 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 25 00:11:11 crc kubenswrapper[4985]: I0125 00:11:11.653173 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 25 00:11:11 crc kubenswrapper[4985]: I0125 00:11:11.744197 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 25 00:11:11 crc kubenswrapper[4985]: I0125 00:11:11.875437 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 25 00:11:11 crc kubenswrapper[4985]: I0125 00:11:11.962399 4985 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 25 00:11:12 crc kubenswrapper[4985]: I0125 00:11:12.019335 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 25 00:11:12 crc kubenswrapper[4985]: I0125 00:11:12.296853 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7e68708c-6c3b-43d8-8005-1e144e5f8ad1" path="/var/lib/kubelet/pods/7e68708c-6c3b-43d8-8005-1e144e5f8ad1/volumes" Jan 25 00:11:12 crc kubenswrapper[4985]: I0125 00:11:12.350529 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 25 00:11:12 crc kubenswrapper[4985]: I0125 00:11:12.451556 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 25 00:11:12 crc kubenswrapper[4985]: I0125 00:11:12.613213 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 25 00:11:12 crc kubenswrapper[4985]: I0125 00:11:12.769517 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 25 00:11:12 crc kubenswrapper[4985]: I0125 00:11:12.872048 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs"] Jan 25 00:11:12 crc kubenswrapper[4985]: I0125 00:11:12.872402 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" podUID="6a43b154-65f1-46f6-8417-479bd1464b41" containerName="controller-manager" containerID="cri-o://da6dbb7d6882081b42fabdb5ba3b8e49b51e8c967a4601dfa7f3c852de984ca7" gracePeriod=30 Jan 25 00:11:12 crc kubenswrapper[4985]: I0125 00:11:12.939341 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 25 00:11:12 crc kubenswrapper[4985]: I0125 00:11:12.988220 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5768cb968-97whc"] Jan 25 00:11:12 crc kubenswrapper[4985]: I0125 00:11:12.988625 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" podUID="42646a8a-9535-4af7-9fc7-73f495747fb3" containerName="route-controller-manager" containerID="cri-o://fc7b55087a83daae5a0b67ace5ea8c7b18c43d373288f43d5e6c5c0fbc196d8d" gracePeriod=30 Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.113990 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.353335 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.424118 4985 generic.go:334] "Generic (PLEG): container finished" podID="6a43b154-65f1-46f6-8417-479bd1464b41" containerID="da6dbb7d6882081b42fabdb5ba3b8e49b51e8c967a4601dfa7f3c852de984ca7" exitCode=0 Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.424160 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" event={"ID":"6a43b154-65f1-46f6-8417-479bd1464b41","Type":"ContainerDied","Data":"da6dbb7d6882081b42fabdb5ba3b8e49b51e8c967a4601dfa7f3c852de984ca7"} Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.424189 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.424205 4985 scope.go:117] "RemoveContainer" containerID="da6dbb7d6882081b42fabdb5ba3b8e49b51e8c967a4601dfa7f3c852de984ca7" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.424194 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs" event={"ID":"6a43b154-65f1-46f6-8417-479bd1464b41","Type":"ContainerDied","Data":"34c9e24c4d276f3487452026e7ab58dc51ce1837146c70f6ee6943ced06fac59"} Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.426086 4985 generic.go:334] "Generic (PLEG): container finished" podID="42646a8a-9535-4af7-9fc7-73f495747fb3" containerID="fc7b55087a83daae5a0b67ace5ea8c7b18c43d373288f43d5e6c5c0fbc196d8d" exitCode=0 Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.426136 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" event={"ID":"42646a8a-9535-4af7-9fc7-73f495747fb3","Type":"ContainerDied","Data":"fc7b55087a83daae5a0b67ace5ea8c7b18c43d373288f43d5e6c5c0fbc196d8d"} Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.426162 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" event={"ID":"42646a8a-9535-4af7-9fc7-73f495747fb3","Type":"ContainerDied","Data":"a23469bb95481633564eb5c126f9cdf77b640ae9dc39120e4d308a0873c6d7a1"} Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.426173 4985 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a23469bb95481633564eb5c126f9cdf77b640ae9dc39120e4d308a0873c6d7a1" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.436447 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.439630 4985 scope.go:117] "RemoveContainer" containerID="da6dbb7d6882081b42fabdb5ba3b8e49b51e8c967a4601dfa7f3c852de984ca7" Jan 25 00:11:13 crc kubenswrapper[4985]: E0125 00:11:13.439924 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da6dbb7d6882081b42fabdb5ba3b8e49b51e8c967a4601dfa7f3c852de984ca7\": container with ID starting with da6dbb7d6882081b42fabdb5ba3b8e49b51e8c967a4601dfa7f3c852de984ca7 not found: ID does not exist" containerID="da6dbb7d6882081b42fabdb5ba3b8e49b51e8c967a4601dfa7f3c852de984ca7" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.439955 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da6dbb7d6882081b42fabdb5ba3b8e49b51e8c967a4601dfa7f3c852de984ca7"} err="failed to get container status \"da6dbb7d6882081b42fabdb5ba3b8e49b51e8c967a4601dfa7f3c852de984ca7\": rpc error: code = NotFound desc = could not find container \"da6dbb7d6882081b42fabdb5ba3b8e49b51e8c967a4601dfa7f3c852de984ca7\": container with ID starting with da6dbb7d6882081b42fabdb5ba3b8e49b51e8c967a4601dfa7f3c852de984ca7 not found: ID does not exist" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.505393 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a43b154-65f1-46f6-8417-479bd1464b41-config\") pod \"6a43b154-65f1-46f6-8417-479bd1464b41\" (UID: \"6a43b154-65f1-46f6-8417-479bd1464b41\") " Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.505674 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a43b154-65f1-46f6-8417-479bd1464b41-serving-cert\") pod \"6a43b154-65f1-46f6-8417-479bd1464b41\" (UID: \"6a43b154-65f1-46f6-8417-479bd1464b41\") " Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.505783 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fm9kl\" (UniqueName: \"kubernetes.io/projected/6a43b154-65f1-46f6-8417-479bd1464b41-kube-api-access-fm9kl\") pod \"6a43b154-65f1-46f6-8417-479bd1464b41\" (UID: \"6a43b154-65f1-46f6-8417-479bd1464b41\") " Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.505885 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/42646a8a-9535-4af7-9fc7-73f495747fb3-serving-cert\") pod \"42646a8a-9535-4af7-9fc7-73f495747fb3\" (UID: \"42646a8a-9535-4af7-9fc7-73f495747fb3\") " Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.506010 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6a43b154-65f1-46f6-8417-479bd1464b41-client-ca\") pod \"6a43b154-65f1-46f6-8417-479bd1464b41\" (UID: \"6a43b154-65f1-46f6-8417-479bd1464b41\") " Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.506142 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42646a8a-9535-4af7-9fc7-73f495747fb3-config\") pod \"42646a8a-9535-4af7-9fc7-73f495747fb3\" (UID: \"42646a8a-9535-4af7-9fc7-73f495747fb3\") " Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.506275 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/42646a8a-9535-4af7-9fc7-73f495747fb3-client-ca\") pod \"42646a8a-9535-4af7-9fc7-73f495747fb3\" (UID: \"42646a8a-9535-4af7-9fc7-73f495747fb3\") " Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.506398 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6a43b154-65f1-46f6-8417-479bd1464b41-proxy-ca-bundles\") pod \"6a43b154-65f1-46f6-8417-479bd1464b41\" (UID: \"6a43b154-65f1-46f6-8417-479bd1464b41\") " Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.506509 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a43b154-65f1-46f6-8417-479bd1464b41-client-ca" (OuterVolumeSpecName: "client-ca") pod "6a43b154-65f1-46f6-8417-479bd1464b41" (UID: "6a43b154-65f1-46f6-8417-479bd1464b41"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.506768 4985 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6a43b154-65f1-46f6-8417-479bd1464b41-client-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.506763 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a43b154-65f1-46f6-8417-479bd1464b41-config" (OuterVolumeSpecName: "config") pod "6a43b154-65f1-46f6-8417-479bd1464b41" (UID: "6a43b154-65f1-46f6-8417-479bd1464b41"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.507043 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a43b154-65f1-46f6-8417-479bd1464b41-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "6a43b154-65f1-46f6-8417-479bd1464b41" (UID: "6a43b154-65f1-46f6-8417-479bd1464b41"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.507053 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42646a8a-9535-4af7-9fc7-73f495747fb3-client-ca" (OuterVolumeSpecName: "client-ca") pod "42646a8a-9535-4af7-9fc7-73f495747fb3" (UID: "42646a8a-9535-4af7-9fc7-73f495747fb3"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.507138 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42646a8a-9535-4af7-9fc7-73f495747fb3-config" (OuterVolumeSpecName: "config") pod "42646a8a-9535-4af7-9fc7-73f495747fb3" (UID: "42646a8a-9535-4af7-9fc7-73f495747fb3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.511210 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/42646a8a-9535-4af7-9fc7-73f495747fb3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "42646a8a-9535-4af7-9fc7-73f495747fb3" (UID: "42646a8a-9535-4af7-9fc7-73f495747fb3"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.511600 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a43b154-65f1-46f6-8417-479bd1464b41-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6a43b154-65f1-46f6-8417-479bd1464b41" (UID: "6a43b154-65f1-46f6-8417-479bd1464b41"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.515650 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a43b154-65f1-46f6-8417-479bd1464b41-kube-api-access-fm9kl" (OuterVolumeSpecName: "kube-api-access-fm9kl") pod "6a43b154-65f1-46f6-8417-479bd1464b41" (UID: "6a43b154-65f1-46f6-8417-479bd1464b41"). InnerVolumeSpecName "kube-api-access-fm9kl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.607359 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2wt8h\" (UniqueName: \"kubernetes.io/projected/42646a8a-9535-4af7-9fc7-73f495747fb3-kube-api-access-2wt8h\") pod \"42646a8a-9535-4af7-9fc7-73f495747fb3\" (UID: \"42646a8a-9535-4af7-9fc7-73f495747fb3\") " Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.608373 4985 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/42646a8a-9535-4af7-9fc7-73f495747fb3-client-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.608389 4985 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6a43b154-65f1-46f6-8417-479bd1464b41-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.608402 4985 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a43b154-65f1-46f6-8417-479bd1464b41-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.608420 4985 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a43b154-65f1-46f6-8417-479bd1464b41-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.608429 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fm9kl\" (UniqueName: \"kubernetes.io/projected/6a43b154-65f1-46f6-8417-479bd1464b41-kube-api-access-fm9kl\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.608440 4985 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/42646a8a-9535-4af7-9fc7-73f495747fb3-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.608449 4985 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/42646a8a-9535-4af7-9fc7-73f495747fb3-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.610793 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42646a8a-9535-4af7-9fc7-73f495747fb3-kube-api-access-2wt8h" (OuterVolumeSpecName: "kube-api-access-2wt8h") pod "42646a8a-9535-4af7-9fc7-73f495747fb3" (UID: "42646a8a-9535-4af7-9fc7-73f495747fb3"). 
InnerVolumeSpecName "kube-api-access-2wt8h". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.706223 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.710675 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2wt8h\" (UniqueName: \"kubernetes.io/projected/42646a8a-9535-4af7-9fc7-73f495747fb3-kube-api-access-2wt8h\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.750720 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs"] Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.754174 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-75dd5bcc84-vxtrs"] Jan 25 00:11:13 crc kubenswrapper[4985]: I0125 00:11:13.829233 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 25 00:11:14 crc kubenswrapper[4985]: I0125 00:11:14.026902 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 25 00:11:14 crc kubenswrapper[4985]: I0125 00:11:14.055728 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 25 00:11:14 crc kubenswrapper[4985]: I0125 00:11:14.288021 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a43b154-65f1-46f6-8417-479bd1464b41" path="/var/lib/kubelet/pods/6a43b154-65f1-46f6-8417-479bd1464b41/volumes" Jan 25 00:11:14 crc kubenswrapper[4985]: I0125 00:11:14.434209 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-5768cb968-97whc" Jan 25 00:11:14 crc kubenswrapper[4985]: I0125 00:11:14.466184 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5768cb968-97whc"] Jan 25 00:11:14 crc kubenswrapper[4985]: I0125 00:11:14.472859 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-5768cb968-97whc"] Jan 25 00:11:14 crc kubenswrapper[4985]: I0125 00:11:14.833039 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 25 00:11:14 crc kubenswrapper[4985]: I0125 00:11:14.844273 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 25 00:11:15 crc kubenswrapper[4985]: I0125 00:11:15.846154 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 25 00:11:16 crc kubenswrapper[4985]: I0125 00:11:16.289517 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42646a8a-9535-4af7-9fc7-73f495747fb3" path="/var/lib/kubelet/pods/42646a8a-9535-4af7-9fc7-73f495747fb3/volumes" Jan 25 00:11:16 crc kubenswrapper[4985]: I0125 00:11:16.452560 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 25 00:11:16 crc kubenswrapper[4985]: I0125 00:11:16.452634 4985 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="4e1a042ea414b64dd7f7b3b6fe9f794e44b93d2517f1a0deaaa2f8999aaa9436" exitCode=137 Jan 25 00:11:16 crc kubenswrapper[4985]: I0125 00:11:16.573609 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 25 00:11:16 crc kubenswrapper[4985]: I0125 00:11:16.574026 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 25 00:11:16 crc kubenswrapper[4985]: I0125 00:11:16.748163 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 25 00:11:16 crc kubenswrapper[4985]: I0125 00:11:16.748282 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 25 00:11:16 crc kubenswrapper[4985]: I0125 00:11:16.748310 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 25 00:11:16 crc kubenswrapper[4985]: I0125 00:11:16.748357 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 25 00:11:16 crc kubenswrapper[4985]: I0125 00:11:16.748380 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 25 00:11:16 crc kubenswrapper[4985]: I0125 00:11:16.748673 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:11:16 crc kubenswrapper[4985]: I0125 00:11:16.748691 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:11:16 crc kubenswrapper[4985]: I0125 00:11:16.748767 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:11:16 crc kubenswrapper[4985]: I0125 00:11:16.748783 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:11:16 crc kubenswrapper[4985]: I0125 00:11:16.759329 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:11:16 crc kubenswrapper[4985]: I0125 00:11:16.849801 4985 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:16 crc kubenswrapper[4985]: I0125 00:11:16.849857 4985 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:16 crc kubenswrapper[4985]: I0125 00:11:16.849869 4985 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:16 crc kubenswrapper[4985]: I0125 00:11:16.849882 4985 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:16 crc kubenswrapper[4985]: I0125 00:11:16.849894 4985 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:17 crc kubenswrapper[4985]: I0125 00:11:17.461856 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 25 00:11:17 crc kubenswrapper[4985]: I0125 00:11:17.461991 4985 scope.go:117] "RemoveContainer" containerID="4e1a042ea414b64dd7f7b3b6fe9f794e44b93d2517f1a0deaaa2f8999aaa9436" Jan 25 00:11:17 crc kubenswrapper[4985]: I0125 00:11:17.462037 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 25 00:11:18 crc kubenswrapper[4985]: I0125 00:11:18.284619 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Jan 25 00:11:18 crc kubenswrapper[4985]: I0125 00:11:18.285066 4985 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Jan 25 00:11:18 crc kubenswrapper[4985]: I0125 00:11:18.295743 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 25 00:11:18 crc kubenswrapper[4985]: I0125 00:11:18.295792 4985 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="6da64400-f25e-41e5-ad15-c36a7889e4fd" Jan 25 00:11:18 crc kubenswrapper[4985]: I0125 00:11:18.299765 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 25 00:11:18 crc kubenswrapper[4985]: I0125 00:11:18.299806 4985 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="6da64400-f25e-41e5-ad15-c36a7889e4fd" Jan 25 00:11:22 crc kubenswrapper[4985]: I0125 00:11:22.074859 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 25 00:11:22 crc kubenswrapper[4985]: I0125 00:11:22.538182 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 25 00:11:22 crc kubenswrapper[4985]: I0125 00:11:22.874271 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 25 00:11:22 crc kubenswrapper[4985]: I0125 00:11:22.912218 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 25 00:11:23 crc kubenswrapper[4985]: I0125 00:11:23.521765 4985 generic.go:334] "Generic (PLEG): container finished" podID="e1ea9185-aa51-4b82-98ed-b2f028d291b2" containerID="65b0124bb3c53e61e92adc4d43b522d25c014a919c59350d31c2cb18a405a8a5" exitCode=0 Jan 25 00:11:23 crc kubenswrapper[4985]: I0125 00:11:23.521828 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" event={"ID":"e1ea9185-aa51-4b82-98ed-b2f028d291b2","Type":"ContainerDied","Data":"65b0124bb3c53e61e92adc4d43b522d25c014a919c59350d31c2cb18a405a8a5"} Jan 25 00:11:23 crc kubenswrapper[4985]: I0125 00:11:23.522422 4985 scope.go:117] "RemoveContainer" containerID="65b0124bb3c53e61e92adc4d43b522d25c014a919c59350d31c2cb18a405a8a5" Jan 25 00:11:23 crc kubenswrapper[4985]: I0125 00:11:23.846566 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 25 00:11:24 crc kubenswrapper[4985]: I0125 00:11:24.435052 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 25 00:11:24 crc kubenswrapper[4985]: I0125 00:11:24.531096 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" 
event={"ID":"e1ea9185-aa51-4b82-98ed-b2f028d291b2","Type":"ContainerStarted","Data":"405235f475530b6fcd27c382bd5feaceeded35a4961933221cd3a88cc01de17b"} Jan 25 00:11:24 crc kubenswrapper[4985]: I0125 00:11:24.533559 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" Jan 25 00:11:24 crc kubenswrapper[4985]: I0125 00:11:24.535994 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" Jan 25 00:11:24 crc kubenswrapper[4985]: I0125 00:11:24.646266 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 25 00:11:25 crc kubenswrapper[4985]: I0125 00:11:25.115894 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 25 00:11:25 crc kubenswrapper[4985]: I0125 00:11:25.380411 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 25 00:11:25 crc kubenswrapper[4985]: I0125 00:11:25.769168 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 25 00:11:27 crc kubenswrapper[4985]: I0125 00:11:27.368934 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 25 00:11:27 crc kubenswrapper[4985]: I0125 00:11:27.606940 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 25 00:11:27 crc kubenswrapper[4985]: I0125 00:11:27.912380 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 25 00:11:29 crc kubenswrapper[4985]: I0125 00:11:29.125776 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 25 00:11:29 crc kubenswrapper[4985]: I0125 00:11:29.795167 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 25 00:11:29 crc kubenswrapper[4985]: I0125 00:11:29.969514 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 25 00:11:30 crc kubenswrapper[4985]: I0125 00:11:30.122023 4985 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Jan 25 00:11:30 crc kubenswrapper[4985]: I0125 00:11:30.290871 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 25 00:11:30 crc kubenswrapper[4985]: I0125 00:11:30.448378 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 25 00:11:30 crc kubenswrapper[4985]: I0125 00:11:30.455387 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 25 00:11:30 crc kubenswrapper[4985]: I0125 00:11:30.578384 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 25 00:11:30 crc kubenswrapper[4985]: I0125 00:11:30.637090 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 25 00:11:30 crc kubenswrapper[4985]: I0125 
00:11:30.761141 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 25 00:11:30 crc kubenswrapper[4985]: I0125 00:11:30.807845 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 25 00:11:30 crc kubenswrapper[4985]: I0125 00:11:30.860773 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.054782 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.567347 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.717013 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.752858 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.939780 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf"] Jan 25 00:11:31 crc kubenswrapper[4985]: E0125 00:11:31.940282 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a43b154-65f1-46f6-8417-479bd1464b41" containerName="controller-manager" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.940441 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a43b154-65f1-46f6-8417-479bd1464b41" containerName="controller-manager" Jan 25 00:11:31 crc kubenswrapper[4985]: E0125 00:11:31.940536 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42646a8a-9535-4af7-9fc7-73f495747fb3" containerName="route-controller-manager" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.940619 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="42646a8a-9535-4af7-9fc7-73f495747fb3" containerName="route-controller-manager" Jan 25 00:11:31 crc kubenswrapper[4985]: E0125 00:11:31.940715 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7e68708c-6c3b-43d8-8005-1e144e5f8ad1" containerName="oauth-openshift" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.940811 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="7e68708c-6c3b-43d8-8005-1e144e5f8ad1" containerName="oauth-openshift" Jan 25 00:11:31 crc kubenswrapper[4985]: E0125 00:11:31.940919 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.941003 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 25 00:11:31 crc kubenswrapper[4985]: E0125 00:11:31.941085 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83b165bc-18cd-43cb-9c88-18b18c31229d" containerName="installer" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.941201 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="83b165bc-18cd-43cb-9c88-18b18c31229d" containerName="installer" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.941400 4985 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="6a43b154-65f1-46f6-8417-479bd1464b41" containerName="controller-manager" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.941497 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.941591 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="42646a8a-9535-4af7-9fc7-73f495747fb3" containerName="route-controller-manager" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.941725 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="83b165bc-18cd-43cb-9c88-18b18c31229d" containerName="installer" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.941839 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="7e68708c-6c3b-43d8-8005-1e144e5f8ad1" containerName="oauth-openshift" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.942370 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.942557 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-6d78cc5f67-dx974"] Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.942981 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.946674 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.947043 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.947595 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.948457 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.949711 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.949849 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.951827 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.952451 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.952661 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.952766 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.952921 4985 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication"/"audit" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.952999 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.953101 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.953207 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.953218 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.953252 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.953994 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.954445 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.961509 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.963629 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.975523 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.982760 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6r5f8\" (UniqueName: \"kubernetes.io/projected/78955bf8-bdcd-425f-8390-5f152be8c10e-kube-api-access-6r5f8\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.982816 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f09efd7c-ee8f-4889-a0c0-a8277fca90ce-serving-cert\") pod \"route-controller-manager-75f6f8944-r6lvf\" (UID: \"f09efd7c-ee8f-4889-a0c0-a8277fca90ce\") " pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.982846 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.982873 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-system-service-ca\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.982904 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.982926 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.982950 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/78955bf8-bdcd-425f-8390-5f152be8c10e-audit-policies\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.982978 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-system-session\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.983009 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-user-template-error\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.983033 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-system-router-certs\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.983055 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-user-template-login\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.983079 4985 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f09efd7c-ee8f-4889-a0c0-a8277fca90ce-config\") pod \"route-controller-manager-75f6f8944-r6lvf\" (UID: \"f09efd7c-ee8f-4889-a0c0-a8277fca90ce\") " pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.983131 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.983155 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.983175 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/78955bf8-bdcd-425f-8390-5f152be8c10e-audit-dir\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.983200 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f09efd7c-ee8f-4889-a0c0-a8277fca90ce-client-ca\") pod \"route-controller-manager-75f6f8944-r6lvf\" (UID: \"f09efd7c-ee8f-4889-a0c0-a8277fca90ce\") " pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.983232 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f696m\" (UniqueName: \"kubernetes.io/projected/f09efd7c-ee8f-4889-a0c0-a8277fca90ce-kube-api-access-f696m\") pod \"route-controller-manager-75f6f8944-r6lvf\" (UID: \"f09efd7c-ee8f-4889-a0c0-a8277fca90ce\") " pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf" Jan 25 00:11:31 crc kubenswrapper[4985]: I0125 00:11:31.983251 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.012090 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.069508 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.084091 
4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-system-service-ca\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.084221 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.084270 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.084310 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/78955bf8-bdcd-425f-8390-5f152be8c10e-audit-policies\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.084368 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-system-session\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.084424 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-user-template-error\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.084460 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-system-router-certs\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.084495 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-user-template-login\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.084532 4985 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f09efd7c-ee8f-4889-a0c0-a8277fca90ce-config\") pod \"route-controller-manager-75f6f8944-r6lvf\" (UID: \"f09efd7c-ee8f-4889-a0c0-a8277fca90ce\") " pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.084575 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.084613 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.084645 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/78955bf8-bdcd-425f-8390-5f152be8c10e-audit-dir\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.084683 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f09efd7c-ee8f-4889-a0c0-a8277fca90ce-client-ca\") pod \"route-controller-manager-75f6f8944-r6lvf\" (UID: \"f09efd7c-ee8f-4889-a0c0-a8277fca90ce\") " pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.084750 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f696m\" (UniqueName: \"kubernetes.io/projected/f09efd7c-ee8f-4889-a0c0-a8277fca90ce-kube-api-access-f696m\") pod \"route-controller-manager-75f6f8944-r6lvf\" (UID: \"f09efd7c-ee8f-4889-a0c0-a8277fca90ce\") " pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.084800 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.084855 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6r5f8\" (UniqueName: \"kubernetes.io/projected/78955bf8-bdcd-425f-8390-5f152be8c10e-kube-api-access-6r5f8\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.084898 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.084934 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f09efd7c-ee8f-4889-a0c0-a8277fca90ce-serving-cert\") pod \"route-controller-manager-75f6f8944-r6lvf\" (UID: \"f09efd7c-ee8f-4889-a0c0-a8277fca90ce\") " pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.085228 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-system-service-ca\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.085697 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.086301 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/78955bf8-bdcd-425f-8390-5f152be8c10e-audit-dir\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.086610 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.087917 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f09efd7c-ee8f-4889-a0c0-a8277fca90ce-client-ca\") pod \"route-controller-manager-75f6f8944-r6lvf\" (UID: \"f09efd7c-ee8f-4889-a0c0-a8277fca90ce\") " pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.087998 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/78955bf8-bdcd-425f-8390-5f152be8c10e-audit-policies\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.089325 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f09efd7c-ee8f-4889-a0c0-a8277fca90ce-config\") pod \"route-controller-manager-75f6f8944-r6lvf\" (UID: 
\"f09efd7c-ee8f-4889-a0c0-a8277fca90ce\") " pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.092056 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.092609 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.093761 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f09efd7c-ee8f-4889-a0c0-a8277fca90ce-serving-cert\") pod \"route-controller-manager-75f6f8944-r6lvf\" (UID: \"f09efd7c-ee8f-4889-a0c0-a8277fca90ce\") " pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.094196 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-system-router-certs\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.094287 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.094323 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.096020 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-user-template-login\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.099169 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-system-session\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: 
\"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.106985 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/78955bf8-bdcd-425f-8390-5f152be8c10e-v4-0-config-user-template-error\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.111951 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f696m\" (UniqueName: \"kubernetes.io/projected/f09efd7c-ee8f-4889-a0c0-a8277fca90ce-kube-api-access-f696m\") pod \"route-controller-manager-75f6f8944-r6lvf\" (UID: \"f09efd7c-ee8f-4889-a0c0-a8277fca90ce\") " pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.112343 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6r5f8\" (UniqueName: \"kubernetes.io/projected/78955bf8-bdcd-425f-8390-5f152be8c10e-kube-api-access-6r5f8\") pod \"oauth-openshift-6d78cc5f67-dx974\" (UID: \"78955bf8-bdcd-425f-8390-5f152be8c10e\") " pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.233294 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.270942 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.289270 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.424848 4985 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.783364 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.828446 4985 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.848446 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf"] Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.868377 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-f7fc999fd-9hhhf"] Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.868989 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.871203 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.871368 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.871723 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.871873 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.872079 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.872221 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.888903 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.895953 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8324e2d0-7f92-4cb7-80cd-ead91932d128-config\") pod \"controller-manager-f7fc999fd-9hhhf\" (UID: \"8324e2d0-7f92-4cb7-80cd-ead91932d128\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.896260 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-szdmg\" (UniqueName: \"kubernetes.io/projected/8324e2d0-7f92-4cb7-80cd-ead91932d128-kube-api-access-szdmg\") pod \"controller-manager-f7fc999fd-9hhhf\" (UID: \"8324e2d0-7f92-4cb7-80cd-ead91932d128\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.896288 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8324e2d0-7f92-4cb7-80cd-ead91932d128-client-ca\") pod \"controller-manager-f7fc999fd-9hhhf\" (UID: \"8324e2d0-7f92-4cb7-80cd-ead91932d128\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.896335 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8324e2d0-7f92-4cb7-80cd-ead91932d128-proxy-ca-bundles\") pod \"controller-manager-f7fc999fd-9hhhf\" (UID: \"8324e2d0-7f92-4cb7-80cd-ead91932d128\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.896378 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8324e2d0-7f92-4cb7-80cd-ead91932d128-serving-cert\") pod \"controller-manager-f7fc999fd-9hhhf\" (UID: \"8324e2d0-7f92-4cb7-80cd-ead91932d128\") " 
pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.912774 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.996882 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8324e2d0-7f92-4cb7-80cd-ead91932d128-client-ca\") pod \"controller-manager-f7fc999fd-9hhhf\" (UID: \"8324e2d0-7f92-4cb7-80cd-ead91932d128\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.996946 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8324e2d0-7f92-4cb7-80cd-ead91932d128-proxy-ca-bundles\") pod \"controller-manager-f7fc999fd-9hhhf\" (UID: \"8324e2d0-7f92-4cb7-80cd-ead91932d128\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.996981 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8324e2d0-7f92-4cb7-80cd-ead91932d128-serving-cert\") pod \"controller-manager-f7fc999fd-9hhhf\" (UID: \"8324e2d0-7f92-4cb7-80cd-ead91932d128\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.997013 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8324e2d0-7f92-4cb7-80cd-ead91932d128-config\") pod \"controller-manager-f7fc999fd-9hhhf\" (UID: \"8324e2d0-7f92-4cb7-80cd-ead91932d128\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.997032 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-szdmg\" (UniqueName: \"kubernetes.io/projected/8324e2d0-7f92-4cb7-80cd-ead91932d128-kube-api-access-szdmg\") pod \"controller-manager-f7fc999fd-9hhhf\" (UID: \"8324e2d0-7f92-4cb7-80cd-ead91932d128\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.998947 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8324e2d0-7f92-4cb7-80cd-ead91932d128-client-ca\") pod \"controller-manager-f7fc999fd-9hhhf\" (UID: \"8324e2d0-7f92-4cb7-80cd-ead91932d128\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.999181 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8324e2d0-7f92-4cb7-80cd-ead91932d128-config\") pod \"controller-manager-f7fc999fd-9hhhf\" (UID: \"8324e2d0-7f92-4cb7-80cd-ead91932d128\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" Jan 25 00:11:32 crc kubenswrapper[4985]: I0125 00:11:32.999887 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8324e2d0-7f92-4cb7-80cd-ead91932d128-proxy-ca-bundles\") pod \"controller-manager-f7fc999fd-9hhhf\" (UID: \"8324e2d0-7f92-4cb7-80cd-ead91932d128\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" Jan 25 00:11:33 crc 
kubenswrapper[4985]: I0125 00:11:33.002240 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8324e2d0-7f92-4cb7-80cd-ead91932d128-serving-cert\") pod \"controller-manager-f7fc999fd-9hhhf\" (UID: \"8324e2d0-7f92-4cb7-80cd-ead91932d128\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" Jan 25 00:11:33 crc kubenswrapper[4985]: I0125 00:11:33.012740 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-szdmg\" (UniqueName: \"kubernetes.io/projected/8324e2d0-7f92-4cb7-80cd-ead91932d128-kube-api-access-szdmg\") pod \"controller-manager-f7fc999fd-9hhhf\" (UID: \"8324e2d0-7f92-4cb7-80cd-ead91932d128\") " pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" Jan 25 00:11:33 crc kubenswrapper[4985]: I0125 00:11:33.106997 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 25 00:11:33 crc kubenswrapper[4985]: I0125 00:11:33.196593 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" Jan 25 00:11:33 crc kubenswrapper[4985]: I0125 00:11:33.762732 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 25 00:11:34 crc kubenswrapper[4985]: I0125 00:11:34.253994 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 25 00:11:34 crc kubenswrapper[4985]: I0125 00:11:34.280351 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 25 00:11:34 crc kubenswrapper[4985]: I0125 00:11:34.372263 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 25 00:11:35 crc kubenswrapper[4985]: I0125 00:11:35.017256 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 25 00:11:35 crc kubenswrapper[4985]: I0125 00:11:35.017879 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 25 00:11:35 crc kubenswrapper[4985]: I0125 00:11:35.027700 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 25 00:11:35 crc kubenswrapper[4985]: I0125 00:11:35.090866 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 25 00:11:35 crc kubenswrapper[4985]: I0125 00:11:35.359155 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 25 00:11:35 crc kubenswrapper[4985]: I0125 00:11:35.465790 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 25 00:11:35 crc kubenswrapper[4985]: I0125 00:11:35.676304 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 25 00:11:35 crc kubenswrapper[4985]: I0125 00:11:35.939917 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 25 00:11:36 
crc kubenswrapper[4985]: I0125 00:11:36.051844 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 25 00:11:36 crc kubenswrapper[4985]: I0125 00:11:36.125651 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 25 00:11:36 crc kubenswrapper[4985]: I0125 00:11:36.616807 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 25 00:11:36 crc kubenswrapper[4985]: I0125 00:11:36.895784 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 25 00:11:36 crc kubenswrapper[4985]: I0125 00:11:36.982510 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 25 00:11:37 crc kubenswrapper[4985]: I0125 00:11:37.015489 4985 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 25 00:11:37 crc kubenswrapper[4985]: I0125 00:11:37.348427 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 25 00:11:37 crc kubenswrapper[4985]: I0125 00:11:37.447708 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 25 00:11:37 crc kubenswrapper[4985]: I0125 00:11:37.647146 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 25 00:11:37 crc kubenswrapper[4985]: I0125 00:11:37.810987 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 25 00:11:37 crc kubenswrapper[4985]: I0125 00:11:37.888666 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 25 00:11:37 crc kubenswrapper[4985]: I0125 00:11:37.899176 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 25 00:11:38 crc kubenswrapper[4985]: I0125 00:11:38.013076 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 25 00:11:38 crc kubenswrapper[4985]: I0125 00:11:38.194831 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 25 00:11:38 crc kubenswrapper[4985]: I0125 00:11:38.299393 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 25 00:11:39 crc kubenswrapper[4985]: I0125 00:11:39.051781 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 25 00:11:39 crc kubenswrapper[4985]: I0125 00:11:39.450514 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 25 00:11:39 crc kubenswrapper[4985]: I0125 00:11:39.591272 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 25 00:11:39 crc kubenswrapper[4985]: I0125 00:11:39.894690 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 25 00:11:39 crc 
kubenswrapper[4985]: I0125 00:11:39.917040 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 25 00:11:40 crc kubenswrapper[4985]: I0125 00:11:40.013124 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 25 00:11:40 crc kubenswrapper[4985]: I0125 00:11:40.107781 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 25 00:11:40 crc kubenswrapper[4985]: I0125 00:11:40.478875 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 25 00:11:41 crc kubenswrapper[4985]: I0125 00:11:41.615795 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 25 00:11:41 crc kubenswrapper[4985]: I0125 00:11:41.639747 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 25 00:11:41 crc kubenswrapper[4985]: I0125 00:11:41.640164 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 25 00:11:41 crc kubenswrapper[4985]: I0125 00:11:41.695899 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 25 00:11:42 crc kubenswrapper[4985]: I0125 00:11:42.686956 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 25 00:11:42 crc kubenswrapper[4985]: I0125 00:11:42.706239 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 25 00:11:43 crc kubenswrapper[4985]: I0125 00:11:43.031236 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 25 00:11:43 crc kubenswrapper[4985]: I0125 00:11:43.347934 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 25 00:11:43 crc kubenswrapper[4985]: I0125 00:11:43.459804 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 25 00:11:43 crc kubenswrapper[4985]: I0125 00:11:43.736549 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 25 00:11:43 crc kubenswrapper[4985]: I0125 00:11:43.789010 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 25 00:11:44 crc kubenswrapper[4985]: I0125 00:11:44.301421 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 25 00:11:44 crc kubenswrapper[4985]: I0125 00:11:44.306533 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 25 00:11:44 crc kubenswrapper[4985]: I0125 00:11:44.601061 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 25 00:11:45 crc kubenswrapper[4985]: I0125 00:11:45.032414 4985 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 25 00:11:45 crc kubenswrapper[4985]: I0125 00:11:45.225696 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 25 00:11:45 crc kubenswrapper[4985]: I0125 00:11:45.275482 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 25 00:11:45 crc kubenswrapper[4985]: I0125 00:11:45.314384 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 25 00:11:45 crc kubenswrapper[4985]: I0125 00:11:45.387889 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 25 00:11:45 crc kubenswrapper[4985]: I0125 00:11:45.489659 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 25 00:11:45 crc kubenswrapper[4985]: I0125 00:11:45.554956 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 25 00:11:45 crc kubenswrapper[4985]: I0125 00:11:45.881074 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 25 00:11:45 crc kubenswrapper[4985]: I0125 00:11:45.888194 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 25 00:11:46 crc kubenswrapper[4985]: I0125 00:11:46.138341 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 25 00:11:46 crc kubenswrapper[4985]: I0125 00:11:46.144926 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 25 00:11:46 crc kubenswrapper[4985]: I0125 00:11:46.234779 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 25 00:11:46 crc kubenswrapper[4985]: I0125 00:11:46.657622 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 25 00:11:46 crc kubenswrapper[4985]: I0125 00:11:46.941034 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 25 00:11:47 crc kubenswrapper[4985]: I0125 00:11:47.105296 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-f7fc999fd-9hhhf"] Jan 25 00:11:47 crc kubenswrapper[4985]: I0125 00:11:47.111719 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6d78cc5f67-dx974"] Jan 25 00:11:47 crc kubenswrapper[4985]: I0125 00:11:47.131426 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf"] Jan 25 00:11:47 crc kubenswrapper[4985]: I0125 00:11:47.300750 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf"] Jan 25 00:11:47 crc kubenswrapper[4985]: I0125 00:11:47.363287 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-f7fc999fd-9hhhf"] Jan 25 00:11:47 crc kubenswrapper[4985]: W0125 00:11:47.377630 4985 
manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8324e2d0_7f92_4cb7_80cd_ead91932d128.slice/crio-cbcfb784f71b6c110fafd3c4f72ec50712986b662081064b6beda30d5f251d1e WatchSource:0}: Error finding container cbcfb784f71b6c110fafd3c4f72ec50712986b662081064b6beda30d5f251d1e: Status 404 returned error can't find the container with id cbcfb784f71b6c110fafd3c4f72ec50712986b662081064b6beda30d5f251d1e Jan 25 00:11:47 crc kubenswrapper[4985]: I0125 00:11:47.415659 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6d78cc5f67-dx974"] Jan 25 00:11:47 crc kubenswrapper[4985]: I0125 00:11:47.464210 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 25 00:11:47 crc kubenswrapper[4985]: I0125 00:11:47.517072 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 25 00:11:47 crc kubenswrapper[4985]: I0125 00:11:47.724421 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" event={"ID":"8324e2d0-7f92-4cb7-80cd-ead91932d128","Type":"ContainerStarted","Data":"5e49c26950f10cc35508e0372006e08121988998b908f4c51ec83116889ad1e0"} Jan 25 00:11:47 crc kubenswrapper[4985]: I0125 00:11:47.724472 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" event={"ID":"8324e2d0-7f92-4cb7-80cd-ead91932d128","Type":"ContainerStarted","Data":"cbcfb784f71b6c110fafd3c4f72ec50712986b662081064b6beda30d5f251d1e"} Jan 25 00:11:47 crc kubenswrapper[4985]: I0125 00:11:47.725344 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" Jan 25 00:11:47 crc kubenswrapper[4985]: I0125 00:11:47.727202 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" event={"ID":"78955bf8-bdcd-425f-8390-5f152be8c10e","Type":"ContainerStarted","Data":"c629273c1b810a45fd0ae228ecc673b2834e86945ee53a12a90064472ecc1d5d"} Jan 25 00:11:47 crc kubenswrapper[4985]: I0125 00:11:47.727249 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" event={"ID":"78955bf8-bdcd-425f-8390-5f152be8c10e","Type":"ContainerStarted","Data":"827813756d97114fba2d7a489ad916082adf48dcec73b544295cceb101406ac5"} Jan 25 00:11:47 crc kubenswrapper[4985]: I0125 00:11:47.727268 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:47 crc kubenswrapper[4985]: I0125 00:11:47.728826 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf" event={"ID":"f09efd7c-ee8f-4889-a0c0-a8277fca90ce","Type":"ContainerStarted","Data":"8e9214d15833354880d51d2fdcf09cf6750ee6053f08331899304623cab4e2a5"} Jan 25 00:11:47 crc kubenswrapper[4985]: I0125 00:11:47.728987 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf" event={"ID":"f09efd7c-ee8f-4889-a0c0-a8277fca90ce","Type":"ContainerStarted","Data":"3cd390c5689cca6232f00a2abd4f9fa7d772b0f9c7a71a255b092bfe99a693ba"} Jan 25 00:11:47 crc kubenswrapper[4985]: I0125 00:11:47.729021 4985 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf" podUID="f09efd7c-ee8f-4889-a0c0-a8277fca90ce" containerName="route-controller-manager" containerID="cri-o://8e9214d15833354880d51d2fdcf09cf6750ee6053f08331899304623cab4e2a5" gracePeriod=30 Jan 25 00:11:47 crc kubenswrapper[4985]: I0125 00:11:47.729146 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf" Jan 25 00:11:47 crc kubenswrapper[4985]: I0125 00:11:47.732169 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" Jan 25 00:11:47 crc kubenswrapper[4985]: I0125 00:11:47.751735 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" podStartSLOduration=15.751717933 podStartE2EDuration="15.751717933s" podCreationTimestamp="2026-01-25 00:11:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:11:47.749719462 +0000 UTC m=+317.781655755" watchObservedRunningTime="2026-01-25 00:11:47.751717933 +0000 UTC m=+317.783654216" Jan 25 00:11:47 crc kubenswrapper[4985]: I0125 00:11:47.766408 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 25 00:11:47 crc kubenswrapper[4985]: I0125 00:11:47.772517 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" podStartSLOduration=90.772482564 podStartE2EDuration="1m30.772482564s" podCreationTimestamp="2026-01-25 00:10:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:11:47.768610823 +0000 UTC m=+317.800547116" watchObservedRunningTime="2026-01-25 00:11:47.772482564 +0000 UTC m=+317.804418847" Jan 25 00:11:47 crc kubenswrapper[4985]: I0125 00:11:47.783540 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf" podStartSLOduration=34.783524101 podStartE2EDuration="34.783524101s" podCreationTimestamp="2026-01-25 00:11:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:11:47.78154368 +0000 UTC m=+317.813479963" watchObservedRunningTime="2026-01-25 00:11:47.783524101 +0000 UTC m=+317.815460374" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.021236 4985 patch_prober.go:28] interesting pod/route-controller-manager-75f6f8944-r6lvf container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.64:8443/healthz\": read tcp 10.217.0.2:40770->10.217.0.64:8443: read: connection reset by peer" start-of-body= Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.021743 4985 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf" podUID="f09efd7c-ee8f-4889-a0c0-a8277fca90ce" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.64:8443/healthz\": read tcp 10.217.0.2:40770->10.217.0.64:8443: read: connection reset 
by peer" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.260884 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-6d78cc5f67-dx974" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.302499 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.367900 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-75f6f8944-r6lvf_f09efd7c-ee8f-4889-a0c0-a8277fca90ce/route-controller-manager/0.log" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.367988 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.391380 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.397326 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf"] Jan 25 00:11:48 crc kubenswrapper[4985]: E0125 00:11:48.397648 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f09efd7c-ee8f-4889-a0c0-a8277fca90ce" containerName="route-controller-manager" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.397675 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="f09efd7c-ee8f-4889-a0c0-a8277fca90ce" containerName="route-controller-manager" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.397830 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="f09efd7c-ee8f-4889-a0c0-a8277fca90ce" containerName="route-controller-manager" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.398442 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.415704 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf"] Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.519173 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f09efd7c-ee8f-4889-a0c0-a8277fca90ce-config\") pod \"f09efd7c-ee8f-4889-a0c0-a8277fca90ce\" (UID: \"f09efd7c-ee8f-4889-a0c0-a8277fca90ce\") " Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.519230 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f09efd7c-ee8f-4889-a0c0-a8277fca90ce-client-ca\") pod \"f09efd7c-ee8f-4889-a0c0-a8277fca90ce\" (UID: \"f09efd7c-ee8f-4889-a0c0-a8277fca90ce\") " Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.519272 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f696m\" (UniqueName: \"kubernetes.io/projected/f09efd7c-ee8f-4889-a0c0-a8277fca90ce-kube-api-access-f696m\") pod \"f09efd7c-ee8f-4889-a0c0-a8277fca90ce\" (UID: \"f09efd7c-ee8f-4889-a0c0-a8277fca90ce\") " Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.519353 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f09efd7c-ee8f-4889-a0c0-a8277fca90ce-serving-cert\") pod \"f09efd7c-ee8f-4889-a0c0-a8277fca90ce\" (UID: \"f09efd7c-ee8f-4889-a0c0-a8277fca90ce\") " Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.519540 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wd8dt\" (UniqueName: \"kubernetes.io/projected/8e37f6bb-e178-4724-9320-a4add7248799-kube-api-access-wd8dt\") pod \"route-controller-manager-7b66f58d44-j64wf\" (UID: \"8e37f6bb-e178-4724-9320-a4add7248799\") " pod="openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.519616 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8e37f6bb-e178-4724-9320-a4add7248799-client-ca\") pod \"route-controller-manager-7b66f58d44-j64wf\" (UID: \"8e37f6bb-e178-4724-9320-a4add7248799\") " pod="openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.519675 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e37f6bb-e178-4724-9320-a4add7248799-config\") pod \"route-controller-manager-7b66f58d44-j64wf\" (UID: \"8e37f6bb-e178-4724-9320-a4add7248799\") " pod="openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.519710 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e37f6bb-e178-4724-9320-a4add7248799-serving-cert\") pod \"route-controller-manager-7b66f58d44-j64wf\" (UID: \"8e37f6bb-e178-4724-9320-a4add7248799\") " pod="openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf" Jan 25 00:11:48 crc 
kubenswrapper[4985]: I0125 00:11:48.520228 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f09efd7c-ee8f-4889-a0c0-a8277fca90ce-client-ca" (OuterVolumeSpecName: "client-ca") pod "f09efd7c-ee8f-4889-a0c0-a8277fca90ce" (UID: "f09efd7c-ee8f-4889-a0c0-a8277fca90ce"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.520918 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f09efd7c-ee8f-4889-a0c0-a8277fca90ce-config" (OuterVolumeSpecName: "config") pod "f09efd7c-ee8f-4889-a0c0-a8277fca90ce" (UID: "f09efd7c-ee8f-4889-a0c0-a8277fca90ce"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.528919 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f09efd7c-ee8f-4889-a0c0-a8277fca90ce-kube-api-access-f696m" (OuterVolumeSpecName: "kube-api-access-f696m") pod "f09efd7c-ee8f-4889-a0c0-a8277fca90ce" (UID: "f09efd7c-ee8f-4889-a0c0-a8277fca90ce"). InnerVolumeSpecName "kube-api-access-f696m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.531313 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f09efd7c-ee8f-4889-a0c0-a8277fca90ce-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "f09efd7c-ee8f-4889-a0c0-a8277fca90ce" (UID: "f09efd7c-ee8f-4889-a0c0-a8277fca90ce"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.620937 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8e37f6bb-e178-4724-9320-a4add7248799-client-ca\") pod \"route-controller-manager-7b66f58d44-j64wf\" (UID: \"8e37f6bb-e178-4724-9320-a4add7248799\") " pod="openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.621006 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e37f6bb-e178-4724-9320-a4add7248799-config\") pod \"route-controller-manager-7b66f58d44-j64wf\" (UID: \"8e37f6bb-e178-4724-9320-a4add7248799\") " pod="openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.621042 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e37f6bb-e178-4724-9320-a4add7248799-serving-cert\") pod \"route-controller-manager-7b66f58d44-j64wf\" (UID: \"8e37f6bb-e178-4724-9320-a4add7248799\") " pod="openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.621076 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wd8dt\" (UniqueName: \"kubernetes.io/projected/8e37f6bb-e178-4724-9320-a4add7248799-kube-api-access-wd8dt\") pod \"route-controller-manager-7b66f58d44-j64wf\" (UID: \"8e37f6bb-e178-4724-9320-a4add7248799\") " pod="openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.621157 4985 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-f696m\" (UniqueName: \"kubernetes.io/projected/f09efd7c-ee8f-4889-a0c0-a8277fca90ce-kube-api-access-f696m\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.621174 4985 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f09efd7c-ee8f-4889-a0c0-a8277fca90ce-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.621187 4985 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f09efd7c-ee8f-4889-a0c0-a8277fca90ce-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.621200 4985 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f09efd7c-ee8f-4889-a0c0-a8277fca90ce-client-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.622252 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e37f6bb-e178-4724-9320-a4add7248799-config\") pod \"route-controller-manager-7b66f58d44-j64wf\" (UID: \"8e37f6bb-e178-4724-9320-a4add7248799\") " pod="openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.622763 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8e37f6bb-e178-4724-9320-a4add7248799-client-ca\") pod \"route-controller-manager-7b66f58d44-j64wf\" (UID: \"8e37f6bb-e178-4724-9320-a4add7248799\") " pod="openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.626434 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e37f6bb-e178-4724-9320-a4add7248799-serving-cert\") pod \"route-controller-manager-7b66f58d44-j64wf\" (UID: \"8e37f6bb-e178-4724-9320-a4add7248799\") " pod="openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.641892 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wd8dt\" (UniqueName: \"kubernetes.io/projected/8e37f6bb-e178-4724-9320-a4add7248799-kube-api-access-wd8dt\") pod \"route-controller-manager-7b66f58d44-j64wf\" (UID: \"8e37f6bb-e178-4724-9320-a4add7248799\") " pod="openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.725834 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.727462 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.732392 4985 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.738226 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-75f6f8944-r6lvf_f09efd7c-ee8f-4889-a0c0-a8277fca90ce/route-controller-manager/0.log" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.738285 4985 generic.go:334] "Generic (PLEG): container finished" podID="f09efd7c-ee8f-4889-a0c0-a8277fca90ce" containerID="8e9214d15833354880d51d2fdcf09cf6750ee6053f08331899304623cab4e2a5" exitCode=255 Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.738379 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf" event={"ID":"f09efd7c-ee8f-4889-a0c0-a8277fca90ce","Type":"ContainerDied","Data":"8e9214d15833354880d51d2fdcf09cf6750ee6053f08331899304623cab4e2a5"} Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.738425 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.738451 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf" event={"ID":"f09efd7c-ee8f-4889-a0c0-a8277fca90ce","Type":"ContainerDied","Data":"3cd390c5689cca6232f00a2abd4f9fa7d772b0f9c7a71a255b092bfe99a693ba"} Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.738488 4985 scope.go:117] "RemoveContainer" containerID="8e9214d15833354880d51d2fdcf09cf6750ee6053f08331899304623cab4e2a5" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.769997 4985 scope.go:117] "RemoveContainer" containerID="8e9214d15833354880d51d2fdcf09cf6750ee6053f08331899304623cab4e2a5" Jan 25 00:11:48 crc kubenswrapper[4985]: E0125 00:11:48.771055 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e9214d15833354880d51d2fdcf09cf6750ee6053f08331899304623cab4e2a5\": container with ID starting with 8e9214d15833354880d51d2fdcf09cf6750ee6053f08331899304623cab4e2a5 not found: ID does not exist" containerID="8e9214d15833354880d51d2fdcf09cf6750ee6053f08331899304623cab4e2a5" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.771171 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e9214d15833354880d51d2fdcf09cf6750ee6053f08331899304623cab4e2a5"} err="failed to get container status \"8e9214d15833354880d51d2fdcf09cf6750ee6053f08331899304623cab4e2a5\": rpc error: code = NotFound desc = could not find container \"8e9214d15833354880d51d2fdcf09cf6750ee6053f08331899304623cab4e2a5\": container with ID starting with 8e9214d15833354880d51d2fdcf09cf6750ee6053f08331899304623cab4e2a5 not found: ID does not exist" Jan 25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.789576 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf"] Jan 
25 00:11:48 crc kubenswrapper[4985]: I0125 00:11:48.794555 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75f6f8944-r6lvf"] Jan 25 00:11:49 crc kubenswrapper[4985]: I0125 00:11:49.184486 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf"] Jan 25 00:11:49 crc kubenswrapper[4985]: W0125 00:11:49.191585 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8e37f6bb_e178_4724_9320_a4add7248799.slice/crio-65e005cc09ecc5c19e6cfee133d9a3d1684d357e24135b1f66c841ac732e01d4 WatchSource:0}: Error finding container 65e005cc09ecc5c19e6cfee133d9a3d1684d357e24135b1f66c841ac732e01d4: Status 404 returned error can't find the container with id 65e005cc09ecc5c19e6cfee133d9a3d1684d357e24135b1f66c841ac732e01d4 Jan 25 00:11:49 crc kubenswrapper[4985]: I0125 00:11:49.611783 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 25 00:11:49 crc kubenswrapper[4985]: I0125 00:11:49.673059 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 25 00:11:49 crc kubenswrapper[4985]: I0125 00:11:49.693840 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 25 00:11:49 crc kubenswrapper[4985]: I0125 00:11:49.747491 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf" event={"ID":"8e37f6bb-e178-4724-9320-a4add7248799","Type":"ContainerStarted","Data":"74d0f7e4d65923ce1a62d074eaf0907f9f0e193a7a0bfd7a8eb3e1bb0400ba85"} Jan 25 00:11:49 crc kubenswrapper[4985]: I0125 00:11:49.747523 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf" event={"ID":"8e37f6bb-e178-4724-9320-a4add7248799","Type":"ContainerStarted","Data":"65e005cc09ecc5c19e6cfee133d9a3d1684d357e24135b1f66c841ac732e01d4"} Jan 25 00:11:49 crc kubenswrapper[4985]: I0125 00:11:49.748191 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf" Jan 25 00:11:49 crc kubenswrapper[4985]: I0125 00:11:49.773155 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf" podStartSLOduration=17.773130263 podStartE2EDuration="17.773130263s" podCreationTimestamp="2026-01-25 00:11:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:11:49.772945238 +0000 UTC m=+319.804881521" watchObservedRunningTime="2026-01-25 00:11:49.773130263 +0000 UTC m=+319.805066576" Jan 25 00:11:50 crc kubenswrapper[4985]: I0125 00:11:50.085310 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf" Jan 25 00:11:50 crc kubenswrapper[4985]: I0125 00:11:50.281933 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f09efd7c-ee8f-4889-a0c0-a8277fca90ce" path="/var/lib/kubelet/pods/f09efd7c-ee8f-4889-a0c0-a8277fca90ce/volumes" Jan 25 00:11:50 crc kubenswrapper[4985]: I0125 
00:11:50.376720 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 25 00:11:50 crc kubenswrapper[4985]: I0125 00:11:50.624789 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 25 00:11:50 crc kubenswrapper[4985]: I0125 00:11:50.626153 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 25 00:11:50 crc kubenswrapper[4985]: I0125 00:11:50.648411 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 25 00:11:51 crc kubenswrapper[4985]: I0125 00:11:51.039744 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 25 00:11:51 crc kubenswrapper[4985]: I0125 00:11:51.388439 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 25 00:11:51 crc kubenswrapper[4985]: I0125 00:11:51.569453 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 25 00:11:51 crc kubenswrapper[4985]: I0125 00:11:51.784053 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 25 00:11:51 crc kubenswrapper[4985]: I0125 00:11:51.843970 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 25 00:11:51 crc kubenswrapper[4985]: I0125 00:11:51.957723 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 25 00:11:52 crc kubenswrapper[4985]: I0125 00:11:52.176618 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 25 00:11:54 crc kubenswrapper[4985]: I0125 00:11:54.395725 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 25 00:11:58 crc kubenswrapper[4985]: I0125 00:11:58.773409 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nsrmt"] Jan 25 00:11:58 crc kubenswrapper[4985]: I0125 00:11:58.774231 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nsrmt" podUID="841f3be9-8a92-4e9e-af89-ddf60ffc736e" containerName="registry-server" containerID="cri-o://e4e1beafea23ac2d99c7a2b826ee6aa37e844c6cc3a39c2fcd0f660e3b52bd13" gracePeriod=30 Jan 25 00:11:58 crc kubenswrapper[4985]: I0125 00:11:58.786320 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-77pvn"] Jan 25 00:11:58 crc kubenswrapper[4985]: I0125 00:11:58.786871 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-77pvn" podUID="badea0b3-377c-4171-931a-2fc2a9a07922" containerName="registry-server" containerID="cri-o://b359eb48fb7dc09841b9392d88c29132e05bc01733ba5e08b66c40af5d12a0e8" gracePeriod=30 Jan 25 00:11:58 crc kubenswrapper[4985]: I0125 00:11:58.793087 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fwpcj"] Jan 25 00:11:58 crc kubenswrapper[4985]: I0125 00:11:58.793435 4985 kuberuntime_container.go:808] "Killing container with a grace 
period" pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" podUID="e1ea9185-aa51-4b82-98ed-b2f028d291b2" containerName="marketplace-operator" containerID="cri-o://405235f475530b6fcd27c382bd5feaceeded35a4961933221cd3a88cc01de17b" gracePeriod=30 Jan 25 00:11:58 crc kubenswrapper[4985]: I0125 00:11:58.806256 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rl7bj"] Jan 25 00:11:58 crc kubenswrapper[4985]: I0125 00:11:58.806523 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rl7bj" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" containerName="registry-server" containerID="cri-o://e8924f7b027489aa74840163eb9ab05bbca149ec8dd4a534bf51a112c0f96ab5" gracePeriod=30 Jan 25 00:11:58 crc kubenswrapper[4985]: I0125 00:11:58.814770 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rb5dx"] Jan 25 00:11:58 crc kubenswrapper[4985]: I0125 00:11:58.815741 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rb5dx" Jan 25 00:11:58 crc kubenswrapper[4985]: I0125 00:11:58.828373 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-d5j6f"] Jan 25 00:11:58 crc kubenswrapper[4985]: I0125 00:11:58.828739 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-d5j6f" podUID="5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1" containerName="registry-server" containerID="cri-o://3f82fba9e360d9bbd38166d069d890f2a6e5e0f634af1d748401519f53491f1a" gracePeriod=30 Jan 25 00:11:58 crc kubenswrapper[4985]: I0125 00:11:58.838196 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rb5dx"] Jan 25 00:11:58 crc kubenswrapper[4985]: I0125 00:11:58.842873 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zcf2n"] Jan 25 00:11:58 crc kubenswrapper[4985]: I0125 00:11:58.845332 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-zcf2n" podUID="edab77a9-c9b1-44b8-8b21-275fc3bcdd81" containerName="registry-server" containerID="cri-o://65e7cec1864eb3d61de7a874972835336daf70b8f55c57b8eb797af178107cbb" gracePeriod=30 Jan 25 00:11:58 crc kubenswrapper[4985]: I0125 00:11:58.907176 4985 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-marketplace/redhat-operators-zcf2n" podUID="edab77a9-c9b1-44b8-8b21-275fc3bcdd81" containerName="registry-server" probeResult="failure" output="" Jan 25 00:11:58 crc kubenswrapper[4985]: I0125 00:11:58.913731 4985 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/redhat-operators-zcf2n" podUID="edab77a9-c9b1-44b8-8b21-275fc3bcdd81" containerName="registry-server" probeResult="failure" output="" Jan 25 00:11:58 crc kubenswrapper[4985]: I0125 00:11:58.987692 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m789z\" (UniqueName: \"kubernetes.io/projected/6a7626ea-b9ae-40c0-a15d-26059903fb75-kube-api-access-m789z\") pod \"marketplace-operator-79b997595-rb5dx\" (UID: \"6a7626ea-b9ae-40c0-a15d-26059903fb75\") " pod="openshift-marketplace/marketplace-operator-79b997595-rb5dx" Jan 25 00:11:58 crc kubenswrapper[4985]: I0125 00:11:58.987751 4985 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6a7626ea-b9ae-40c0-a15d-26059903fb75-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rb5dx\" (UID: \"6a7626ea-b9ae-40c0-a15d-26059903fb75\") " pod="openshift-marketplace/marketplace-operator-79b997595-rb5dx" Jan 25 00:11:58 crc kubenswrapper[4985]: I0125 00:11:58.987886 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6a7626ea-b9ae-40c0-a15d-26059903fb75-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rb5dx\" (UID: \"6a7626ea-b9ae-40c0-a15d-26059903fb75\") " pod="openshift-marketplace/marketplace-operator-79b997595-rb5dx" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.088631 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6a7626ea-b9ae-40c0-a15d-26059903fb75-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rb5dx\" (UID: \"6a7626ea-b9ae-40c0-a15d-26059903fb75\") " pod="openshift-marketplace/marketplace-operator-79b997595-rb5dx" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.088722 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m789z\" (UniqueName: \"kubernetes.io/projected/6a7626ea-b9ae-40c0-a15d-26059903fb75-kube-api-access-m789z\") pod \"marketplace-operator-79b997595-rb5dx\" (UID: \"6a7626ea-b9ae-40c0-a15d-26059903fb75\") " pod="openshift-marketplace/marketplace-operator-79b997595-rb5dx" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.088785 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6a7626ea-b9ae-40c0-a15d-26059903fb75-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rb5dx\" (UID: \"6a7626ea-b9ae-40c0-a15d-26059903fb75\") " pod="openshift-marketplace/marketplace-operator-79b997595-rb5dx" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.091244 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/6a7626ea-b9ae-40c0-a15d-26059903fb75-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rb5dx\" (UID: \"6a7626ea-b9ae-40c0-a15d-26059903fb75\") " pod="openshift-marketplace/marketplace-operator-79b997595-rb5dx" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.094860 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/6a7626ea-b9ae-40c0-a15d-26059903fb75-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rb5dx\" (UID: \"6a7626ea-b9ae-40c0-a15d-26059903fb75\") " pod="openshift-marketplace/marketplace-operator-79b997595-rb5dx" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.103135 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m789z\" (UniqueName: \"kubernetes.io/projected/6a7626ea-b9ae-40c0-a15d-26059903fb75-kube-api-access-m789z\") pod \"marketplace-operator-79b997595-rb5dx\" (UID: \"6a7626ea-b9ae-40c0-a15d-26059903fb75\") " pod="openshift-marketplace/marketplace-operator-79b997595-rb5dx" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.152396 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rb5dx" Jan 25 00:11:59 crc kubenswrapper[4985]: E0125 00:11:59.210477 4985 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3f82fba9e360d9bbd38166d069d890f2a6e5e0f634af1d748401519f53491f1a is running failed: container process not found" containerID="3f82fba9e360d9bbd38166d069d890f2a6e5e0f634af1d748401519f53491f1a" cmd=["grpc_health_probe","-addr=:50051"] Jan 25 00:11:59 crc kubenswrapper[4985]: E0125 00:11:59.211167 4985 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3f82fba9e360d9bbd38166d069d890f2a6e5e0f634af1d748401519f53491f1a is running failed: container process not found" containerID="3f82fba9e360d9bbd38166d069d890f2a6e5e0f634af1d748401519f53491f1a" cmd=["grpc_health_probe","-addr=:50051"] Jan 25 00:11:59 crc kubenswrapper[4985]: E0125 00:11:59.212185 4985 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3f82fba9e360d9bbd38166d069d890f2a6e5e0f634af1d748401519f53491f1a is running failed: container process not found" containerID="3f82fba9e360d9bbd38166d069d890f2a6e5e0f634af1d748401519f53491f1a" cmd=["grpc_health_probe","-addr=:50051"] Jan 25 00:11:59 crc kubenswrapper[4985]: E0125 00:11:59.212261 4985 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 3f82fba9e360d9bbd38166d069d890f2a6e5e0f634af1d748401519f53491f1a is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-d5j6f" podUID="5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1" containerName="registry-server" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.419701 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-77pvn" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.596548 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/badea0b3-377c-4171-931a-2fc2a9a07922-catalog-content\") pod \"badea0b3-377c-4171-931a-2fc2a9a07922\" (UID: \"badea0b3-377c-4171-931a-2fc2a9a07922\") " Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.596651 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/badea0b3-377c-4171-931a-2fc2a9a07922-utilities\") pod \"badea0b3-377c-4171-931a-2fc2a9a07922\" (UID: \"badea0b3-377c-4171-931a-2fc2a9a07922\") " Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.596681 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tm5cl\" (UniqueName: \"kubernetes.io/projected/badea0b3-377c-4171-931a-2fc2a9a07922-kube-api-access-tm5cl\") pod \"badea0b3-377c-4171-931a-2fc2a9a07922\" (UID: \"badea0b3-377c-4171-931a-2fc2a9a07922\") " Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.598269 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/badea0b3-377c-4171-931a-2fc2a9a07922-utilities" (OuterVolumeSpecName: "utilities") pod "badea0b3-377c-4171-931a-2fc2a9a07922" (UID: "badea0b3-377c-4171-931a-2fc2a9a07922"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.603225 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/badea0b3-377c-4171-931a-2fc2a9a07922-kube-api-access-tm5cl" (OuterVolumeSpecName: "kube-api-access-tm5cl") pod "badea0b3-377c-4171-931a-2fc2a9a07922" (UID: "badea0b3-377c-4171-931a-2fc2a9a07922"). InnerVolumeSpecName "kube-api-access-tm5cl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.668658 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/badea0b3-377c-4171-931a-2fc2a9a07922-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "badea0b3-377c-4171-931a-2fc2a9a07922" (UID: "badea0b3-377c-4171-931a-2fc2a9a07922"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.673485 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nsrmt" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.682077 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-zcf2n" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.685586 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rl7bj" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.700315 4985 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/badea0b3-377c-4171-931a-2fc2a9a07922-utilities\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.700348 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tm5cl\" (UniqueName: \"kubernetes.io/projected/badea0b3-377c-4171-931a-2fc2a9a07922-kube-api-access-tm5cl\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.700359 4985 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/badea0b3-377c-4171-931a-2fc2a9a07922-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.716955 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d5j6f" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.720577 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.790236 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rb5dx"] Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.800780 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xzwjt\" (UniqueName: \"kubernetes.io/projected/841f3be9-8a92-4e9e-af89-ddf60ffc736e-kube-api-access-xzwjt\") pod \"841f3be9-8a92-4e9e-af89-ddf60ffc736e\" (UID: \"841f3be9-8a92-4e9e-af89-ddf60ffc736e\") " Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.800825 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a26fe5e-9560-455a-a98e-6185e89ee607-utilities\") pod \"7a26fe5e-9560-455a-a98e-6185e89ee607\" (UID: \"7a26fe5e-9560-455a-a98e-6185e89ee607\") " Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.800871 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7a26fe5e-9560-455a-a98e-6185e89ee607-catalog-content\") pod \"7a26fe5e-9560-455a-a98e-6185e89ee607\" (UID: \"7a26fe5e-9560-455a-a98e-6185e89ee607\") " Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.800898 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edab77a9-c9b1-44b8-8b21-275fc3bcdd81-utilities\") pod \"edab77a9-c9b1-44b8-8b21-275fc3bcdd81\" (UID: \"edab77a9-c9b1-44b8-8b21-275fc3bcdd81\") " Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.800933 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/841f3be9-8a92-4e9e-af89-ddf60ffc736e-utilities\") pod \"841f3be9-8a92-4e9e-af89-ddf60ffc736e\" (UID: \"841f3be9-8a92-4e9e-af89-ddf60ffc736e\") " Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.800954 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nz6sd\" (UniqueName: \"kubernetes.io/projected/7a26fe5e-9560-455a-a98e-6185e89ee607-kube-api-access-nz6sd\") pod \"7a26fe5e-9560-455a-a98e-6185e89ee607\" (UID: \"7a26fe5e-9560-455a-a98e-6185e89ee607\") " Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.800974 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edab77a9-c9b1-44b8-8b21-275fc3bcdd81-catalog-content\") pod \"edab77a9-c9b1-44b8-8b21-275fc3bcdd81\" (UID: \"edab77a9-c9b1-44b8-8b21-275fc3bcdd81\") " Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.801014 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/841f3be9-8a92-4e9e-af89-ddf60ffc736e-catalog-content\") pod \"841f3be9-8a92-4e9e-af89-ddf60ffc736e\" (UID: \"841f3be9-8a92-4e9e-af89-ddf60ffc736e\") " Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.801028 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z89zm\" (UniqueName: \"kubernetes.io/projected/edab77a9-c9b1-44b8-8b21-275fc3bcdd81-kube-api-access-z89zm\") pod \"edab77a9-c9b1-44b8-8b21-275fc3bcdd81\" (UID: \"edab77a9-c9b1-44b8-8b21-275fc3bcdd81\") " Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.802283 4985 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/edab77a9-c9b1-44b8-8b21-275fc3bcdd81-utilities" (OuterVolumeSpecName: "utilities") pod "edab77a9-c9b1-44b8-8b21-275fc3bcdd81" (UID: "edab77a9-c9b1-44b8-8b21-275fc3bcdd81"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.802503 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a26fe5e-9560-455a-a98e-6185e89ee607-utilities" (OuterVolumeSpecName: "utilities") pod "7a26fe5e-9560-455a-a98e-6185e89ee607" (UID: "7a26fe5e-9560-455a-a98e-6185e89ee607"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.807754 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/841f3be9-8a92-4e9e-af89-ddf60ffc736e-kube-api-access-xzwjt" (OuterVolumeSpecName: "kube-api-access-xzwjt") pod "841f3be9-8a92-4e9e-af89-ddf60ffc736e" (UID: "841f3be9-8a92-4e9e-af89-ddf60ffc736e"). InnerVolumeSpecName "kube-api-access-xzwjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.808342 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/edab77a9-c9b1-44b8-8b21-275fc3bcdd81-kube-api-access-z89zm" (OuterVolumeSpecName: "kube-api-access-z89zm") pod "edab77a9-c9b1-44b8-8b21-275fc3bcdd81" (UID: "edab77a9-c9b1-44b8-8b21-275fc3bcdd81"). InnerVolumeSpecName "kube-api-access-z89zm". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.810077 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/841f3be9-8a92-4e9e-af89-ddf60ffc736e-utilities" (OuterVolumeSpecName: "utilities") pod "841f3be9-8a92-4e9e-af89-ddf60ffc736e" (UID: "841f3be9-8a92-4e9e-af89-ddf60ffc736e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.811801 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7a26fe5e-9560-455a-a98e-6185e89ee607-kube-api-access-nz6sd" (OuterVolumeSpecName: "kube-api-access-nz6sd") pod "7a26fe5e-9560-455a-a98e-6185e89ee607" (UID: "7a26fe5e-9560-455a-a98e-6185e89ee607"). InnerVolumeSpecName "kube-api-access-nz6sd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.830066 4985 generic.go:334] "Generic (PLEG): container finished" podID="e1ea9185-aa51-4b82-98ed-b2f028d291b2" containerID="405235f475530b6fcd27c382bd5feaceeded35a4961933221cd3a88cc01de17b" exitCode=0 Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.830140 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" event={"ID":"e1ea9185-aa51-4b82-98ed-b2f028d291b2","Type":"ContainerDied","Data":"405235f475530b6fcd27c382bd5feaceeded35a4961933221cd3a88cc01de17b"} Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.830169 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" event={"ID":"e1ea9185-aa51-4b82-98ed-b2f028d291b2","Type":"ContainerDied","Data":"1d618d640e058c479e8de5ed06522c0b9820ef50210f727368cb3b22b268234c"} Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.830186 4985 scope.go:117] "RemoveContainer" containerID="405235f475530b6fcd27c382bd5feaceeded35a4961933221cd3a88cc01de17b" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.830288 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-fwpcj" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.833618 4985 generic.go:334] "Generic (PLEG): container finished" podID="badea0b3-377c-4171-931a-2fc2a9a07922" containerID="b359eb48fb7dc09841b9392d88c29132e05bc01733ba5e08b66c40af5d12a0e8" exitCode=0 Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.833709 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-77pvn" event={"ID":"badea0b3-377c-4171-931a-2fc2a9a07922","Type":"ContainerDied","Data":"b359eb48fb7dc09841b9392d88c29132e05bc01733ba5e08b66c40af5d12a0e8"} Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.833748 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-77pvn" event={"ID":"badea0b3-377c-4171-931a-2fc2a9a07922","Type":"ContainerDied","Data":"59aafd4f3c9de8ebf94ba598e68a001ec4d574ac12acc413925eed46aa0c8ad4"} Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.833757 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-77pvn" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.837406 4985 generic.go:334] "Generic (PLEG): container finished" podID="841f3be9-8a92-4e9e-af89-ddf60ffc736e" containerID="e4e1beafea23ac2d99c7a2b826ee6aa37e844c6cc3a39c2fcd0f660e3b52bd13" exitCode=0 Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.837495 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nsrmt" event={"ID":"841f3be9-8a92-4e9e-af89-ddf60ffc736e","Type":"ContainerDied","Data":"e4e1beafea23ac2d99c7a2b826ee6aa37e844c6cc3a39c2fcd0f660e3b52bd13"} Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.837524 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nsrmt" event={"ID":"841f3be9-8a92-4e9e-af89-ddf60ffc736e","Type":"ContainerDied","Data":"4a76a13ede9ca07167e6278fb8c93305b1e73c4e05ffc6ea4c2ebce5d1f1641a"} Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.837613 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nsrmt" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.845324 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rb5dx" event={"ID":"6a7626ea-b9ae-40c0-a15d-26059903fb75","Type":"ContainerStarted","Data":"d9dda9416e5446751d5e705c671ea83655b92d5272690948f4ea3b35ff1c327c"} Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.845585 4985 scope.go:117] "RemoveContainer" containerID="65b0124bb3c53e61e92adc4d43b522d25c014a919c59350d31c2cb18a405a8a5" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.846224 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7a26fe5e-9560-455a-a98e-6185e89ee607-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7a26fe5e-9560-455a-a98e-6185e89ee607" (UID: "7a26fe5e-9560-455a-a98e-6185e89ee607"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.850550 4985 generic.go:334] "Generic (PLEG): container finished" podID="7a26fe5e-9560-455a-a98e-6185e89ee607" containerID="e8924f7b027489aa74840163eb9ab05bbca149ec8dd4a534bf51a112c0f96ab5" exitCode=0 Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.850634 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rl7bj" event={"ID":"7a26fe5e-9560-455a-a98e-6185e89ee607","Type":"ContainerDied","Data":"e8924f7b027489aa74840163eb9ab05bbca149ec8dd4a534bf51a112c0f96ab5"} Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.850659 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rl7bj" event={"ID":"7a26fe5e-9560-455a-a98e-6185e89ee607","Type":"ContainerDied","Data":"12cf13d4b13d34c74562aa115d78e7dff784edafebd18ff6a62677963cecacc9"} Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.850753 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rl7bj" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.861971 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-77pvn"] Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.863558 4985 generic.go:334] "Generic (PLEG): container finished" podID="edab77a9-c9b1-44b8-8b21-275fc3bcdd81" containerID="65e7cec1864eb3d61de7a874972835336daf70b8f55c57b8eb797af178107cbb" exitCode=0 Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.863676 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zcf2n" event={"ID":"edab77a9-c9b1-44b8-8b21-275fc3bcdd81","Type":"ContainerDied","Data":"65e7cec1864eb3d61de7a874972835336daf70b8f55c57b8eb797af178107cbb"} Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.863949 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-zcf2n" event={"ID":"edab77a9-c9b1-44b8-8b21-275fc3bcdd81","Type":"ContainerDied","Data":"9d9637879b810b11d0d0bd4a6a62821363396ee1f0b64b4916b37a0f608e1536"} Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.864354 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-zcf2n" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.866394 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-77pvn"] Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.867671 4985 generic.go:334] "Generic (PLEG): container finished" podID="5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1" containerID="3f82fba9e360d9bbd38166d069d890f2a6e5e0f634af1d748401519f53491f1a" exitCode=0 Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.867707 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d5j6f" event={"ID":"5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1","Type":"ContainerDied","Data":"3f82fba9e360d9bbd38166d069d890f2a6e5e0f634af1d748401519f53491f1a"} Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.867729 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d5j6f" event={"ID":"5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1","Type":"ContainerDied","Data":"57b8a88fd104b2fb3353e1ab6fe7017fb5783c1320e6b0830fb0880b680d0a88"} Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.867784 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d5j6f" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.869023 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/841f3be9-8a92-4e9e-af89-ddf60ffc736e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "841f3be9-8a92-4e9e-af89-ddf60ffc736e" (UID: "841f3be9-8a92-4e9e-af89-ddf60ffc736e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.890457 4985 scope.go:117] "RemoveContainer" containerID="405235f475530b6fcd27c382bd5feaceeded35a4961933221cd3a88cc01de17b" Jan 25 00:11:59 crc kubenswrapper[4985]: E0125 00:11:59.890848 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"405235f475530b6fcd27c382bd5feaceeded35a4961933221cd3a88cc01de17b\": container with ID starting with 405235f475530b6fcd27c382bd5feaceeded35a4961933221cd3a88cc01de17b not found: ID does not exist" containerID="405235f475530b6fcd27c382bd5feaceeded35a4961933221cd3a88cc01de17b" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.890897 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"405235f475530b6fcd27c382bd5feaceeded35a4961933221cd3a88cc01de17b"} err="failed to get container status \"405235f475530b6fcd27c382bd5feaceeded35a4961933221cd3a88cc01de17b\": rpc error: code = NotFound desc = could not find container \"405235f475530b6fcd27c382bd5feaceeded35a4961933221cd3a88cc01de17b\": container with ID starting with 405235f475530b6fcd27c382bd5feaceeded35a4961933221cd3a88cc01de17b not found: ID does not exist" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.890923 4985 scope.go:117] "RemoveContainer" containerID="65b0124bb3c53e61e92adc4d43b522d25c014a919c59350d31c2cb18a405a8a5" Jan 25 00:11:59 crc kubenswrapper[4985]: E0125 00:11:59.891246 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65b0124bb3c53e61e92adc4d43b522d25c014a919c59350d31c2cb18a405a8a5\": container with ID starting with 65b0124bb3c53e61e92adc4d43b522d25c014a919c59350d31c2cb18a405a8a5 
not found: ID does not exist" containerID="65b0124bb3c53e61e92adc4d43b522d25c014a919c59350d31c2cb18a405a8a5" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.891285 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"65b0124bb3c53e61e92adc4d43b522d25c014a919c59350d31c2cb18a405a8a5"} err="failed to get container status \"65b0124bb3c53e61e92adc4d43b522d25c014a919c59350d31c2cb18a405a8a5\": rpc error: code = NotFound desc = could not find container \"65b0124bb3c53e61e92adc4d43b522d25c014a919c59350d31c2cb18a405a8a5\": container with ID starting with 65b0124bb3c53e61e92adc4d43b522d25c014a919c59350d31c2cb18a405a8a5 not found: ID does not exist" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.891313 4985 scope.go:117] "RemoveContainer" containerID="b359eb48fb7dc09841b9392d88c29132e05bc01733ba5e08b66c40af5d12a0e8" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.891816 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rl7bj"] Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.895277 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rl7bj"] Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.901728 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1-catalog-content\") pod \"5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1\" (UID: \"5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1\") " Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.901793 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e1ea9185-aa51-4b82-98ed-b2f028d291b2-marketplace-trusted-ca\") pod \"e1ea9185-aa51-4b82-98ed-b2f028d291b2\" (UID: \"e1ea9185-aa51-4b82-98ed-b2f028d291b2\") " Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.901821 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cjkcr\" (UniqueName: \"kubernetes.io/projected/5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1-kube-api-access-cjkcr\") pod \"5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1\" (UID: \"5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1\") " Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.901867 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e1ea9185-aa51-4b82-98ed-b2f028d291b2-marketplace-operator-metrics\") pod \"e1ea9185-aa51-4b82-98ed-b2f028d291b2\" (UID: \"e1ea9185-aa51-4b82-98ed-b2f028d291b2\") " Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.901930 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1-utilities\") pod \"5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1\" (UID: \"5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1\") " Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.901958 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sk66t\" (UniqueName: \"kubernetes.io/projected/e1ea9185-aa51-4b82-98ed-b2f028d291b2-kube-api-access-sk66t\") pod \"e1ea9185-aa51-4b82-98ed-b2f028d291b2\" (UID: \"e1ea9185-aa51-4b82-98ed-b2f028d291b2\") " Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.902156 4985 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/7a26fe5e-9560-455a-a98e-6185e89ee607-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.902172 4985 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/edab77a9-c9b1-44b8-8b21-275fc3bcdd81-utilities\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.902181 4985 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/841f3be9-8a92-4e9e-af89-ddf60ffc736e-utilities\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.902190 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nz6sd\" (UniqueName: \"kubernetes.io/projected/7a26fe5e-9560-455a-a98e-6185e89ee607-kube-api-access-nz6sd\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.902200 4985 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/841f3be9-8a92-4e9e-af89-ddf60ffc736e-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.902213 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z89zm\" (UniqueName: \"kubernetes.io/projected/edab77a9-c9b1-44b8-8b21-275fc3bcdd81-kube-api-access-z89zm\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.902222 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xzwjt\" (UniqueName: \"kubernetes.io/projected/841f3be9-8a92-4e9e-af89-ddf60ffc736e-kube-api-access-xzwjt\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.902232 4985 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7a26fe5e-9560-455a-a98e-6185e89ee607-utilities\") on node \"crc\" DevicePath \"\"" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.904431 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1-utilities" (OuterVolumeSpecName: "utilities") pod "5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1" (UID: "5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.905823 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e1ea9185-aa51-4b82-98ed-b2f028d291b2-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "e1ea9185-aa51-4b82-98ed-b2f028d291b2" (UID: "e1ea9185-aa51-4b82-98ed-b2f028d291b2"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.906704 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e1ea9185-aa51-4b82-98ed-b2f028d291b2-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "e1ea9185-aa51-4b82-98ed-b2f028d291b2" (UID: "e1ea9185-aa51-4b82-98ed-b2f028d291b2"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.906832 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1-kube-api-access-cjkcr" (OuterVolumeSpecName: "kube-api-access-cjkcr") pod "5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1" (UID: "5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1"). InnerVolumeSpecName "kube-api-access-cjkcr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.906961 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1ea9185-aa51-4b82-98ed-b2f028d291b2-kube-api-access-sk66t" (OuterVolumeSpecName: "kube-api-access-sk66t") pod "e1ea9185-aa51-4b82-98ed-b2f028d291b2" (UID: "e1ea9185-aa51-4b82-98ed-b2f028d291b2"). InnerVolumeSpecName "kube-api-access-sk66t". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.908392 4985 scope.go:117] "RemoveContainer" containerID="84004ec1b37da1c935e3555456cd483d2ae42526ac01c3263107eff0e75337a6" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.938276 4985 scope.go:117] "RemoveContainer" containerID="c783582ace5e2ca9589c5935a55083d1cf020cbce2801fcd21b3094fbe91357b" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.975068 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/edab77a9-c9b1-44b8-8b21-275fc3bcdd81-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "edab77a9-c9b1-44b8-8b21-275fc3bcdd81" (UID: "edab77a9-c9b1-44b8-8b21-275fc3bcdd81"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.997310 4985 scope.go:117] "RemoveContainer" containerID="b359eb48fb7dc09841b9392d88c29132e05bc01733ba5e08b66c40af5d12a0e8" Jan 25 00:11:59 crc kubenswrapper[4985]: E0125 00:11:59.998310 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b359eb48fb7dc09841b9392d88c29132e05bc01733ba5e08b66c40af5d12a0e8\": container with ID starting with b359eb48fb7dc09841b9392d88c29132e05bc01733ba5e08b66c40af5d12a0e8 not found: ID does not exist" containerID="b359eb48fb7dc09841b9392d88c29132e05bc01733ba5e08b66c40af5d12a0e8" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.998356 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b359eb48fb7dc09841b9392d88c29132e05bc01733ba5e08b66c40af5d12a0e8"} err="failed to get container status \"b359eb48fb7dc09841b9392d88c29132e05bc01733ba5e08b66c40af5d12a0e8\": rpc error: code = NotFound desc = could not find container \"b359eb48fb7dc09841b9392d88c29132e05bc01733ba5e08b66c40af5d12a0e8\": container with ID starting with b359eb48fb7dc09841b9392d88c29132e05bc01733ba5e08b66c40af5d12a0e8 not found: ID does not exist" Jan 25 00:11:59 crc kubenswrapper[4985]: I0125 00:11:59.998381 4985 scope.go:117] "RemoveContainer" containerID="84004ec1b37da1c935e3555456cd483d2ae42526ac01c3263107eff0e75337a6" Jan 25 00:12:00 crc kubenswrapper[4985]: E0125 00:12:00.000304 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84004ec1b37da1c935e3555456cd483d2ae42526ac01c3263107eff0e75337a6\": container with ID starting with 84004ec1b37da1c935e3555456cd483d2ae42526ac01c3263107eff0e75337a6 
not found: ID does not exist" containerID="84004ec1b37da1c935e3555456cd483d2ae42526ac01c3263107eff0e75337a6" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.000333 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84004ec1b37da1c935e3555456cd483d2ae42526ac01c3263107eff0e75337a6"} err="failed to get container status \"84004ec1b37da1c935e3555456cd483d2ae42526ac01c3263107eff0e75337a6\": rpc error: code = NotFound desc = could not find container \"84004ec1b37da1c935e3555456cd483d2ae42526ac01c3263107eff0e75337a6\": container with ID starting with 84004ec1b37da1c935e3555456cd483d2ae42526ac01c3263107eff0e75337a6 not found: ID does not exist" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.000354 4985 scope.go:117] "RemoveContainer" containerID="c783582ace5e2ca9589c5935a55083d1cf020cbce2801fcd21b3094fbe91357b" Jan 25 00:12:00 crc kubenswrapper[4985]: E0125 00:12:00.000559 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c783582ace5e2ca9589c5935a55083d1cf020cbce2801fcd21b3094fbe91357b\": container with ID starting with c783582ace5e2ca9589c5935a55083d1cf020cbce2801fcd21b3094fbe91357b not found: ID does not exist" containerID="c783582ace5e2ca9589c5935a55083d1cf020cbce2801fcd21b3094fbe91357b" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.000583 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c783582ace5e2ca9589c5935a55083d1cf020cbce2801fcd21b3094fbe91357b"} err="failed to get container status \"c783582ace5e2ca9589c5935a55083d1cf020cbce2801fcd21b3094fbe91357b\": rpc error: code = NotFound desc = could not find container \"c783582ace5e2ca9589c5935a55083d1cf020cbce2801fcd21b3094fbe91357b\": container with ID starting with c783582ace5e2ca9589c5935a55083d1cf020cbce2801fcd21b3094fbe91357b not found: ID does not exist" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.000597 4985 scope.go:117] "RemoveContainer" containerID="e4e1beafea23ac2d99c7a2b826ee6aa37e844c6cc3a39c2fcd0f660e3b52bd13" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.003639 4985 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1-utilities\") on node \"crc\" DevicePath \"\"" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.003663 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sk66t\" (UniqueName: \"kubernetes.io/projected/e1ea9185-aa51-4b82-98ed-b2f028d291b2-kube-api-access-sk66t\") on node \"crc\" DevicePath \"\"" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.003673 4985 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/edab77a9-c9b1-44b8-8b21-275fc3bcdd81-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.003683 4985 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e1ea9185-aa51-4b82-98ed-b2f028d291b2-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.003693 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cjkcr\" (UniqueName: \"kubernetes.io/projected/5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1-kube-api-access-cjkcr\") on node \"crc\" DevicePath \"\"" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 
00:12:00.003701 4985 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e1ea9185-aa51-4b82-98ed-b2f028d291b2-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.012266 4985 scope.go:117] "RemoveContainer" containerID="f18f6324b9fc6b75f36b4a86747a3a102639f44fd4740197f0d859d0c0e0fe01" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.027955 4985 scope.go:117] "RemoveContainer" containerID="b5dd1eb0a0fc342dbb99cd5ff3dea91ef4870c6fb22e152c408aaeafe8e911f1" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.038148 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1" (UID: "5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.060964 4985 scope.go:117] "RemoveContainer" containerID="e4e1beafea23ac2d99c7a2b826ee6aa37e844c6cc3a39c2fcd0f660e3b52bd13" Jan 25 00:12:00 crc kubenswrapper[4985]: E0125 00:12:00.061493 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4e1beafea23ac2d99c7a2b826ee6aa37e844c6cc3a39c2fcd0f660e3b52bd13\": container with ID starting with e4e1beafea23ac2d99c7a2b826ee6aa37e844c6cc3a39c2fcd0f660e3b52bd13 not found: ID does not exist" containerID="e4e1beafea23ac2d99c7a2b826ee6aa37e844c6cc3a39c2fcd0f660e3b52bd13" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.061524 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4e1beafea23ac2d99c7a2b826ee6aa37e844c6cc3a39c2fcd0f660e3b52bd13"} err="failed to get container status \"e4e1beafea23ac2d99c7a2b826ee6aa37e844c6cc3a39c2fcd0f660e3b52bd13\": rpc error: code = NotFound desc = could not find container \"e4e1beafea23ac2d99c7a2b826ee6aa37e844c6cc3a39c2fcd0f660e3b52bd13\": container with ID starting with e4e1beafea23ac2d99c7a2b826ee6aa37e844c6cc3a39c2fcd0f660e3b52bd13 not found: ID does not exist" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.061549 4985 scope.go:117] "RemoveContainer" containerID="f18f6324b9fc6b75f36b4a86747a3a102639f44fd4740197f0d859d0c0e0fe01" Jan 25 00:12:00 crc kubenswrapper[4985]: E0125 00:12:00.061828 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f18f6324b9fc6b75f36b4a86747a3a102639f44fd4740197f0d859d0c0e0fe01\": container with ID starting with f18f6324b9fc6b75f36b4a86747a3a102639f44fd4740197f0d859d0c0e0fe01 not found: ID does not exist" containerID="f18f6324b9fc6b75f36b4a86747a3a102639f44fd4740197f0d859d0c0e0fe01" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.061852 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f18f6324b9fc6b75f36b4a86747a3a102639f44fd4740197f0d859d0c0e0fe01"} err="failed to get container status \"f18f6324b9fc6b75f36b4a86747a3a102639f44fd4740197f0d859d0c0e0fe01\": rpc error: code = NotFound desc = could not find container \"f18f6324b9fc6b75f36b4a86747a3a102639f44fd4740197f0d859d0c0e0fe01\": container with ID starting with f18f6324b9fc6b75f36b4a86747a3a102639f44fd4740197f0d859d0c0e0fe01 not found: ID does not exist" Jan 25 00:12:00 crc 
kubenswrapper[4985]: I0125 00:12:00.061869 4985 scope.go:117] "RemoveContainer" containerID="b5dd1eb0a0fc342dbb99cd5ff3dea91ef4870c6fb22e152c408aaeafe8e911f1" Jan 25 00:12:00 crc kubenswrapper[4985]: E0125 00:12:00.062598 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5dd1eb0a0fc342dbb99cd5ff3dea91ef4870c6fb22e152c408aaeafe8e911f1\": container with ID starting with b5dd1eb0a0fc342dbb99cd5ff3dea91ef4870c6fb22e152c408aaeafe8e911f1 not found: ID does not exist" containerID="b5dd1eb0a0fc342dbb99cd5ff3dea91ef4870c6fb22e152c408aaeafe8e911f1" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.062641 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5dd1eb0a0fc342dbb99cd5ff3dea91ef4870c6fb22e152c408aaeafe8e911f1"} err="failed to get container status \"b5dd1eb0a0fc342dbb99cd5ff3dea91ef4870c6fb22e152c408aaeafe8e911f1\": rpc error: code = NotFound desc = could not find container \"b5dd1eb0a0fc342dbb99cd5ff3dea91ef4870c6fb22e152c408aaeafe8e911f1\": container with ID starting with b5dd1eb0a0fc342dbb99cd5ff3dea91ef4870c6fb22e152c408aaeafe8e911f1 not found: ID does not exist" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.062673 4985 scope.go:117] "RemoveContainer" containerID="e8924f7b027489aa74840163eb9ab05bbca149ec8dd4a534bf51a112c0f96ab5" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.075936 4985 scope.go:117] "RemoveContainer" containerID="0f605c940ad13a7c904d75e6152720e18c0cc73eb5998b1a8b4ab45542d6afc8" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.089262 4985 scope.go:117] "RemoveContainer" containerID="7bb78067cf47a56786a52f308497260140f11f6f6fca161eacb536e923d31ee4" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.105144 4985 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.108203 4985 scope.go:117] "RemoveContainer" containerID="e8924f7b027489aa74840163eb9ab05bbca149ec8dd4a534bf51a112c0f96ab5" Jan 25 00:12:00 crc kubenswrapper[4985]: E0125 00:12:00.108650 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e8924f7b027489aa74840163eb9ab05bbca149ec8dd4a534bf51a112c0f96ab5\": container with ID starting with e8924f7b027489aa74840163eb9ab05bbca149ec8dd4a534bf51a112c0f96ab5 not found: ID does not exist" containerID="e8924f7b027489aa74840163eb9ab05bbca149ec8dd4a534bf51a112c0f96ab5" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.108681 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e8924f7b027489aa74840163eb9ab05bbca149ec8dd4a534bf51a112c0f96ab5"} err="failed to get container status \"e8924f7b027489aa74840163eb9ab05bbca149ec8dd4a534bf51a112c0f96ab5\": rpc error: code = NotFound desc = could not find container \"e8924f7b027489aa74840163eb9ab05bbca149ec8dd4a534bf51a112c0f96ab5\": container with ID starting with e8924f7b027489aa74840163eb9ab05bbca149ec8dd4a534bf51a112c0f96ab5 not found: ID does not exist" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.108706 4985 scope.go:117] "RemoveContainer" containerID="0f605c940ad13a7c904d75e6152720e18c0cc73eb5998b1a8b4ab45542d6afc8" Jan 25 00:12:00 crc kubenswrapper[4985]: E0125 00:12:00.109041 4985 log.go:32] "ContainerStatus from runtime 
service failed" err="rpc error: code = NotFound desc = could not find container \"0f605c940ad13a7c904d75e6152720e18c0cc73eb5998b1a8b4ab45542d6afc8\": container with ID starting with 0f605c940ad13a7c904d75e6152720e18c0cc73eb5998b1a8b4ab45542d6afc8 not found: ID does not exist" containerID="0f605c940ad13a7c904d75e6152720e18c0cc73eb5998b1a8b4ab45542d6afc8" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.109083 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0f605c940ad13a7c904d75e6152720e18c0cc73eb5998b1a8b4ab45542d6afc8"} err="failed to get container status \"0f605c940ad13a7c904d75e6152720e18c0cc73eb5998b1a8b4ab45542d6afc8\": rpc error: code = NotFound desc = could not find container \"0f605c940ad13a7c904d75e6152720e18c0cc73eb5998b1a8b4ab45542d6afc8\": container with ID starting with 0f605c940ad13a7c904d75e6152720e18c0cc73eb5998b1a8b4ab45542d6afc8 not found: ID does not exist" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.109126 4985 scope.go:117] "RemoveContainer" containerID="7bb78067cf47a56786a52f308497260140f11f6f6fca161eacb536e923d31ee4" Jan 25 00:12:00 crc kubenswrapper[4985]: E0125 00:12:00.109531 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7bb78067cf47a56786a52f308497260140f11f6f6fca161eacb536e923d31ee4\": container with ID starting with 7bb78067cf47a56786a52f308497260140f11f6f6fca161eacb536e923d31ee4 not found: ID does not exist" containerID="7bb78067cf47a56786a52f308497260140f11f6f6fca161eacb536e923d31ee4" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.109615 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bb78067cf47a56786a52f308497260140f11f6f6fca161eacb536e923d31ee4"} err="failed to get container status \"7bb78067cf47a56786a52f308497260140f11f6f6fca161eacb536e923d31ee4\": rpc error: code = NotFound desc = could not find container \"7bb78067cf47a56786a52f308497260140f11f6f6fca161eacb536e923d31ee4\": container with ID starting with 7bb78067cf47a56786a52f308497260140f11f6f6fca161eacb536e923d31ee4 not found: ID does not exist" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.109653 4985 scope.go:117] "RemoveContainer" containerID="65e7cec1864eb3d61de7a874972835336daf70b8f55c57b8eb797af178107cbb" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.122279 4985 scope.go:117] "RemoveContainer" containerID="4711ba941777bad3b9994f4f906344823beda79d0a0920dfcdc19877d2413fa5" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.135339 4985 scope.go:117] "RemoveContainer" containerID="440d2b852422606c3cbb845bee9e0dd025642c0f2dc4c33816f967e1146d3b6b" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.145992 4985 scope.go:117] "RemoveContainer" containerID="65e7cec1864eb3d61de7a874972835336daf70b8f55c57b8eb797af178107cbb" Jan 25 00:12:00 crc kubenswrapper[4985]: E0125 00:12:00.146420 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65e7cec1864eb3d61de7a874972835336daf70b8f55c57b8eb797af178107cbb\": container with ID starting with 65e7cec1864eb3d61de7a874972835336daf70b8f55c57b8eb797af178107cbb not found: ID does not exist" containerID="65e7cec1864eb3d61de7a874972835336daf70b8f55c57b8eb797af178107cbb" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.146447 4985 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"65e7cec1864eb3d61de7a874972835336daf70b8f55c57b8eb797af178107cbb"} err="failed to get container status \"65e7cec1864eb3d61de7a874972835336daf70b8f55c57b8eb797af178107cbb\": rpc error: code = NotFound desc = could not find container \"65e7cec1864eb3d61de7a874972835336daf70b8f55c57b8eb797af178107cbb\": container with ID starting with 65e7cec1864eb3d61de7a874972835336daf70b8f55c57b8eb797af178107cbb not found: ID does not exist" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.146471 4985 scope.go:117] "RemoveContainer" containerID="4711ba941777bad3b9994f4f906344823beda79d0a0920dfcdc19877d2413fa5" Jan 25 00:12:00 crc kubenswrapper[4985]: E0125 00:12:00.146807 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4711ba941777bad3b9994f4f906344823beda79d0a0920dfcdc19877d2413fa5\": container with ID starting with 4711ba941777bad3b9994f4f906344823beda79d0a0920dfcdc19877d2413fa5 not found: ID does not exist" containerID="4711ba941777bad3b9994f4f906344823beda79d0a0920dfcdc19877d2413fa5" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.146861 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4711ba941777bad3b9994f4f906344823beda79d0a0920dfcdc19877d2413fa5"} err="failed to get container status \"4711ba941777bad3b9994f4f906344823beda79d0a0920dfcdc19877d2413fa5\": rpc error: code = NotFound desc = could not find container \"4711ba941777bad3b9994f4f906344823beda79d0a0920dfcdc19877d2413fa5\": container with ID starting with 4711ba941777bad3b9994f4f906344823beda79d0a0920dfcdc19877d2413fa5 not found: ID does not exist" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.146894 4985 scope.go:117] "RemoveContainer" containerID="440d2b852422606c3cbb845bee9e0dd025642c0f2dc4c33816f967e1146d3b6b" Jan 25 00:12:00 crc kubenswrapper[4985]: E0125 00:12:00.147146 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"440d2b852422606c3cbb845bee9e0dd025642c0f2dc4c33816f967e1146d3b6b\": container with ID starting with 440d2b852422606c3cbb845bee9e0dd025642c0f2dc4c33816f967e1146d3b6b not found: ID does not exist" containerID="440d2b852422606c3cbb845bee9e0dd025642c0f2dc4c33816f967e1146d3b6b" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.147165 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"440d2b852422606c3cbb845bee9e0dd025642c0f2dc4c33816f967e1146d3b6b"} err="failed to get container status \"440d2b852422606c3cbb845bee9e0dd025642c0f2dc4c33816f967e1146d3b6b\": rpc error: code = NotFound desc = could not find container \"440d2b852422606c3cbb845bee9e0dd025642c0f2dc4c33816f967e1146d3b6b\": container with ID starting with 440d2b852422606c3cbb845bee9e0dd025642c0f2dc4c33816f967e1146d3b6b not found: ID does not exist" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.147179 4985 scope.go:117] "RemoveContainer" containerID="3f82fba9e360d9bbd38166d069d890f2a6e5e0f634af1d748401519f53491f1a" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.160609 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fwpcj"] Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.162772 4985 scope.go:117] "RemoveContainer" containerID="313e719b5428f6619aab944b8cf6ad6bb41a595f6eba4493a0eaf6914a2953ba" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.164821 4985 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fwpcj"] Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.171855 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nsrmt"] Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.175291 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nsrmt"] Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.195682 4985 scope.go:117] "RemoveContainer" containerID="1b438f54de5582883bcedba166ed6d5b619c4299b0f46aab12ed308361250667" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.195805 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-d5j6f"] Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.213933 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-d5j6f"] Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.222042 4985 scope.go:117] "RemoveContainer" containerID="3f82fba9e360d9bbd38166d069d890f2a6e5e0f634af1d748401519f53491f1a" Jan 25 00:12:00 crc kubenswrapper[4985]: E0125 00:12:00.222596 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f82fba9e360d9bbd38166d069d890f2a6e5e0f634af1d748401519f53491f1a\": container with ID starting with 3f82fba9e360d9bbd38166d069d890f2a6e5e0f634af1d748401519f53491f1a not found: ID does not exist" containerID="3f82fba9e360d9bbd38166d069d890f2a6e5e0f634af1d748401519f53491f1a" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.222642 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f82fba9e360d9bbd38166d069d890f2a6e5e0f634af1d748401519f53491f1a"} err="failed to get container status \"3f82fba9e360d9bbd38166d069d890f2a6e5e0f634af1d748401519f53491f1a\": rpc error: code = NotFound desc = could not find container \"3f82fba9e360d9bbd38166d069d890f2a6e5e0f634af1d748401519f53491f1a\": container with ID starting with 3f82fba9e360d9bbd38166d069d890f2a6e5e0f634af1d748401519f53491f1a not found: ID does not exist" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.222660 4985 scope.go:117] "RemoveContainer" containerID="313e719b5428f6619aab944b8cf6ad6bb41a595f6eba4493a0eaf6914a2953ba" Jan 25 00:12:00 crc kubenswrapper[4985]: E0125 00:12:00.223144 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"313e719b5428f6619aab944b8cf6ad6bb41a595f6eba4493a0eaf6914a2953ba\": container with ID starting with 313e719b5428f6619aab944b8cf6ad6bb41a595f6eba4493a0eaf6914a2953ba not found: ID does not exist" containerID="313e719b5428f6619aab944b8cf6ad6bb41a595f6eba4493a0eaf6914a2953ba" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.224007 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"313e719b5428f6619aab944b8cf6ad6bb41a595f6eba4493a0eaf6914a2953ba"} err="failed to get container status \"313e719b5428f6619aab944b8cf6ad6bb41a595f6eba4493a0eaf6914a2953ba\": rpc error: code = NotFound desc = could not find container \"313e719b5428f6619aab944b8cf6ad6bb41a595f6eba4493a0eaf6914a2953ba\": container with ID starting with 313e719b5428f6619aab944b8cf6ad6bb41a595f6eba4493a0eaf6914a2953ba not found: ID does not exist" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.224185 4985 scope.go:117] "RemoveContainer" 
containerID="1b438f54de5582883bcedba166ed6d5b619c4299b0f46aab12ed308361250667" Jan 25 00:12:00 crc kubenswrapper[4985]: E0125 00:12:00.224564 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1b438f54de5582883bcedba166ed6d5b619c4299b0f46aab12ed308361250667\": container with ID starting with 1b438f54de5582883bcedba166ed6d5b619c4299b0f46aab12ed308361250667 not found: ID does not exist" containerID="1b438f54de5582883bcedba166ed6d5b619c4299b0f46aab12ed308361250667" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.224593 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1b438f54de5582883bcedba166ed6d5b619c4299b0f46aab12ed308361250667"} err="failed to get container status \"1b438f54de5582883bcedba166ed6d5b619c4299b0f46aab12ed308361250667\": rpc error: code = NotFound desc = could not find container \"1b438f54de5582883bcedba166ed6d5b619c4299b0f46aab12ed308361250667\": container with ID starting with 1b438f54de5582883bcedba166ed6d5b619c4299b0f46aab12ed308361250667 not found: ID does not exist" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.225602 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-zcf2n"] Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.228067 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-zcf2n"] Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.280036 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1" path="/var/lib/kubelet/pods/5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1/volumes" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.280862 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" path="/var/lib/kubelet/pods/7a26fe5e-9560-455a-a98e-6185e89ee607/volumes" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.281673 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="841f3be9-8a92-4e9e-af89-ddf60ffc736e" path="/var/lib/kubelet/pods/841f3be9-8a92-4e9e-af89-ddf60ffc736e/volumes" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.283057 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="badea0b3-377c-4171-931a-2fc2a9a07922" path="/var/lib/kubelet/pods/badea0b3-377c-4171-931a-2fc2a9a07922/volumes" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.283871 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1ea9185-aa51-4b82-98ed-b2f028d291b2" path="/var/lib/kubelet/pods/e1ea9185-aa51-4b82-98ed-b2f028d291b2/volumes" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.285020 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="edab77a9-c9b1-44b8-8b21-275fc3bcdd81" path="/var/lib/kubelet/pods/edab77a9-c9b1-44b8-8b21-275fc3bcdd81/volumes" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.877035 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rb5dx" event={"ID":"6a7626ea-b9ae-40c0-a15d-26059903fb75","Type":"ContainerStarted","Data":"1af16342044b39b5493543f07429c4c8a316a23d25b2846aa8140bd263456a80"} Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.878177 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-rb5dx" Jan 25 00:12:00 crc kubenswrapper[4985]: 
I0125 00:12:00.885169 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-rb5dx" Jan 25 00:12:00 crc kubenswrapper[4985]: I0125 00:12:00.910419 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-rb5dx" podStartSLOduration=2.910395507 podStartE2EDuration="2.910395507s" podCreationTimestamp="2026-01-25 00:11:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:12:00.899139082 +0000 UTC m=+330.931075415" watchObservedRunningTime="2026-01-25 00:12:00.910395507 +0000 UTC m=+330.942331800" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.006430 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xpplg"] Jan 25 00:12:06 crc kubenswrapper[4985]: E0125 00:12:06.007431 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="841f3be9-8a92-4e9e-af89-ddf60ffc736e" containerName="extract-content" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.007446 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="841f3be9-8a92-4e9e-af89-ddf60ffc736e" containerName="extract-content" Jan 25 00:12:06 crc kubenswrapper[4985]: E0125 00:12:06.007470 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" containerName="registry-server" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.007477 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" containerName="registry-server" Jan 25 00:12:06 crc kubenswrapper[4985]: E0125 00:12:06.007496 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="841f3be9-8a92-4e9e-af89-ddf60ffc736e" containerName="extract-utilities" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.007503 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="841f3be9-8a92-4e9e-af89-ddf60ffc736e" containerName="extract-utilities" Jan 25 00:12:06 crc kubenswrapper[4985]: E0125 00:12:06.007511 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edab77a9-c9b1-44b8-8b21-275fc3bcdd81" containerName="extract-content" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.007525 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="edab77a9-c9b1-44b8-8b21-275fc3bcdd81" containerName="extract-content" Jan 25 00:12:06 crc kubenswrapper[4985]: E0125 00:12:06.007533 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1ea9185-aa51-4b82-98ed-b2f028d291b2" containerName="marketplace-operator" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.007540 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1ea9185-aa51-4b82-98ed-b2f028d291b2" containerName="marketplace-operator" Jan 25 00:12:06 crc kubenswrapper[4985]: E0125 00:12:06.007555 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edab77a9-c9b1-44b8-8b21-275fc3bcdd81" containerName="registry-server" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.007562 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="edab77a9-c9b1-44b8-8b21-275fc3bcdd81" containerName="registry-server" Jan 25 00:12:06 crc kubenswrapper[4985]: E0125 00:12:06.007574 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" containerName="extract-utilities" Jan 25 00:12:06 crc 
kubenswrapper[4985]: I0125 00:12:06.007581 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" containerName="extract-utilities" Jan 25 00:12:06 crc kubenswrapper[4985]: E0125 00:12:06.007594 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1ea9185-aa51-4b82-98ed-b2f028d291b2" containerName="marketplace-operator" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.007600 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1ea9185-aa51-4b82-98ed-b2f028d291b2" containerName="marketplace-operator" Jan 25 00:12:06 crc kubenswrapper[4985]: E0125 00:12:06.007616 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1" containerName="extract-content" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.007622 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1" containerName="extract-content" Jan 25 00:12:06 crc kubenswrapper[4985]: E0125 00:12:06.007629 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="badea0b3-377c-4171-931a-2fc2a9a07922" containerName="extract-utilities" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.007635 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="badea0b3-377c-4171-931a-2fc2a9a07922" containerName="extract-utilities" Jan 25 00:12:06 crc kubenswrapper[4985]: E0125 00:12:06.007650 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1" containerName="extract-utilities" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.007657 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1" containerName="extract-utilities" Jan 25 00:12:06 crc kubenswrapper[4985]: E0125 00:12:06.007672 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" containerName="extract-content" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.007678 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" containerName="extract-content" Jan 25 00:12:06 crc kubenswrapper[4985]: E0125 00:12:06.007687 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="edab77a9-c9b1-44b8-8b21-275fc3bcdd81" containerName="extract-utilities" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.007693 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="edab77a9-c9b1-44b8-8b21-275fc3bcdd81" containerName="extract-utilities" Jan 25 00:12:06 crc kubenswrapper[4985]: E0125 00:12:06.007707 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="badea0b3-377c-4171-931a-2fc2a9a07922" containerName="extract-content" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.007713 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="badea0b3-377c-4171-931a-2fc2a9a07922" containerName="extract-content" Jan 25 00:12:06 crc kubenswrapper[4985]: E0125 00:12:06.007724 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="badea0b3-377c-4171-931a-2fc2a9a07922" containerName="registry-server" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.007730 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="badea0b3-377c-4171-931a-2fc2a9a07922" containerName="registry-server" Jan 25 00:12:06 crc kubenswrapper[4985]: E0125 00:12:06.007737 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1" 
containerName="registry-server" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.007744 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1" containerName="registry-server" Jan 25 00:12:06 crc kubenswrapper[4985]: E0125 00:12:06.007753 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="841f3be9-8a92-4e9e-af89-ddf60ffc736e" containerName="registry-server" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.007760 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="841f3be9-8a92-4e9e-af89-ddf60ffc736e" containerName="registry-server" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.007985 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="edab77a9-c9b1-44b8-8b21-275fc3bcdd81" containerName="registry-server" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.008012 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="7a26fe5e-9560-455a-a98e-6185e89ee607" containerName="registry-server" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.008029 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="badea0b3-377c-4171-931a-2fc2a9a07922" containerName="registry-server" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.008041 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b7e13c0-2e33-4cf9-9cec-7b6481b29bd1" containerName="registry-server" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.008049 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1ea9185-aa51-4b82-98ed-b2f028d291b2" containerName="marketplace-operator" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.008061 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1ea9185-aa51-4b82-98ed-b2f028d291b2" containerName="marketplace-operator" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.008073 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="841f3be9-8a92-4e9e-af89-ddf60ffc736e" containerName="registry-server" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.009381 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xpplg" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.032941 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.036195 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xpplg"] Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.181762 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c83724a9-f8b9-4170-993e-55a4801d0b9b-catalog-content\") pod \"certified-operators-xpplg\" (UID: \"c83724a9-f8b9-4170-993e-55a4801d0b9b\") " pod="openshift-marketplace/certified-operators-xpplg" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.181835 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c83724a9-f8b9-4170-993e-55a4801d0b9b-utilities\") pod \"certified-operators-xpplg\" (UID: \"c83724a9-f8b9-4170-993e-55a4801d0b9b\") " pod="openshift-marketplace/certified-operators-xpplg" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.181859 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrt9v\" (UniqueName: \"kubernetes.io/projected/c83724a9-f8b9-4170-993e-55a4801d0b9b-kube-api-access-vrt9v\") pod \"certified-operators-xpplg\" (UID: \"c83724a9-f8b9-4170-993e-55a4801d0b9b\") " pod="openshift-marketplace/certified-operators-xpplg" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.283191 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c83724a9-f8b9-4170-993e-55a4801d0b9b-utilities\") pod \"certified-operators-xpplg\" (UID: \"c83724a9-f8b9-4170-993e-55a4801d0b9b\") " pod="openshift-marketplace/certified-operators-xpplg" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.283656 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrt9v\" (UniqueName: \"kubernetes.io/projected/c83724a9-f8b9-4170-993e-55a4801d0b9b-kube-api-access-vrt9v\") pod \"certified-operators-xpplg\" (UID: \"c83724a9-f8b9-4170-993e-55a4801d0b9b\") " pod="openshift-marketplace/certified-operators-xpplg" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.283712 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c83724a9-f8b9-4170-993e-55a4801d0b9b-utilities\") pod \"certified-operators-xpplg\" (UID: \"c83724a9-f8b9-4170-993e-55a4801d0b9b\") " pod="openshift-marketplace/certified-operators-xpplg" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.283766 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c83724a9-f8b9-4170-993e-55a4801d0b9b-catalog-content\") pod \"certified-operators-xpplg\" (UID: \"c83724a9-f8b9-4170-993e-55a4801d0b9b\") " pod="openshift-marketplace/certified-operators-xpplg" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.284413 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c83724a9-f8b9-4170-993e-55a4801d0b9b-catalog-content\") pod \"certified-operators-xpplg\" (UID: 
\"c83724a9-f8b9-4170-993e-55a4801d0b9b\") " pod="openshift-marketplace/certified-operators-xpplg" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.303769 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrt9v\" (UniqueName: \"kubernetes.io/projected/c83724a9-f8b9-4170-993e-55a4801d0b9b-kube-api-access-vrt9v\") pod \"certified-operators-xpplg\" (UID: \"c83724a9-f8b9-4170-993e-55a4801d0b9b\") " pod="openshift-marketplace/certified-operators-xpplg" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.343949 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xpplg" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.617756 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-r65ns"] Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.619340 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r65ns" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.622989 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.624978 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-r65ns"] Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.646876 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xpplg"] Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.790928 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f7c4664-a09a-4bf7-a974-06d53cd11f51-catalog-content\") pod \"community-operators-r65ns\" (UID: \"3f7c4664-a09a-4bf7-a974-06d53cd11f51\") " pod="openshift-marketplace/community-operators-r65ns" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.791272 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f7c4664-a09a-4bf7-a974-06d53cd11f51-utilities\") pod \"community-operators-r65ns\" (UID: \"3f7c4664-a09a-4bf7-a974-06d53cd11f51\") " pod="openshift-marketplace/community-operators-r65ns" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.791331 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9s68\" (UniqueName: \"kubernetes.io/projected/3f7c4664-a09a-4bf7-a974-06d53cd11f51-kube-api-access-t9s68\") pod \"community-operators-r65ns\" (UID: \"3f7c4664-a09a-4bf7-a974-06d53cd11f51\") " pod="openshift-marketplace/community-operators-r65ns" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.891984 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f7c4664-a09a-4bf7-a974-06d53cd11f51-utilities\") pod \"community-operators-r65ns\" (UID: \"3f7c4664-a09a-4bf7-a974-06d53cd11f51\") " pod="openshift-marketplace/community-operators-r65ns" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.892063 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9s68\" (UniqueName: \"kubernetes.io/projected/3f7c4664-a09a-4bf7-a974-06d53cd11f51-kube-api-access-t9s68\") pod \"community-operators-r65ns\" (UID: 
\"3f7c4664-a09a-4bf7-a974-06d53cd11f51\") " pod="openshift-marketplace/community-operators-r65ns" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.892093 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f7c4664-a09a-4bf7-a974-06d53cd11f51-catalog-content\") pod \"community-operators-r65ns\" (UID: \"3f7c4664-a09a-4bf7-a974-06d53cd11f51\") " pod="openshift-marketplace/community-operators-r65ns" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.892566 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3f7c4664-a09a-4bf7-a974-06d53cd11f51-utilities\") pod \"community-operators-r65ns\" (UID: \"3f7c4664-a09a-4bf7-a974-06d53cd11f51\") " pod="openshift-marketplace/community-operators-r65ns" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.892576 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3f7c4664-a09a-4bf7-a974-06d53cd11f51-catalog-content\") pod \"community-operators-r65ns\" (UID: \"3f7c4664-a09a-4bf7-a974-06d53cd11f51\") " pod="openshift-marketplace/community-operators-r65ns" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.910654 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9s68\" (UniqueName: \"kubernetes.io/projected/3f7c4664-a09a-4bf7-a974-06d53cd11f51-kube-api-access-t9s68\") pod \"community-operators-r65ns\" (UID: \"3f7c4664-a09a-4bf7-a974-06d53cd11f51\") " pod="openshift-marketplace/community-operators-r65ns" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.940711 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-r65ns" Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.945446 4985 generic.go:334] "Generic (PLEG): container finished" podID="c83724a9-f8b9-4170-993e-55a4801d0b9b" containerID="97e6479fedfcc24cacd899386cd3101f12481c67d6055fa8e39a734d6e1b3a59" exitCode=0 Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.945500 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xpplg" event={"ID":"c83724a9-f8b9-4170-993e-55a4801d0b9b","Type":"ContainerDied","Data":"97e6479fedfcc24cacd899386cd3101f12481c67d6055fa8e39a734d6e1b3a59"} Jan 25 00:12:06 crc kubenswrapper[4985]: I0125 00:12:06.945546 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xpplg" event={"ID":"c83724a9-f8b9-4170-993e-55a4801d0b9b","Type":"ContainerStarted","Data":"58fbec732adf0c2c34ea235546fd5fe2b9ca9b4d2556139bf6dbc4c1640661f1"} Jan 25 00:12:07 crc kubenswrapper[4985]: I0125 00:12:07.327377 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-r65ns"] Jan 25 00:12:07 crc kubenswrapper[4985]: W0125 00:12:07.337051 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3f7c4664_a09a_4bf7_a974_06d53cd11f51.slice/crio-97d23884779631a814d23345f5d4f8ba58c5acce8fcbbef63c45d497c842b61b WatchSource:0}: Error finding container 97d23884779631a814d23345f5d4f8ba58c5acce8fcbbef63c45d497c842b61b: Status 404 returned error can't find the container with id 97d23884779631a814d23345f5d4f8ba58c5acce8fcbbef63c45d497c842b61b Jan 25 00:12:07 crc kubenswrapper[4985]: I0125 00:12:07.954670 4985 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xpplg" event={"ID":"c83724a9-f8b9-4170-993e-55a4801d0b9b","Type":"ContainerStarted","Data":"41124a5af60914a23850d9e344b5ee860b180b7482405ecdc6554a66a3ca624f"} Jan 25 00:12:07 crc kubenswrapper[4985]: I0125 00:12:07.958159 4985 generic.go:334] "Generic (PLEG): container finished" podID="3f7c4664-a09a-4bf7-a974-06d53cd11f51" containerID="eac8be476444f25826498a3a816432381a1b92ffc46cf1052d6f7269f546a702" exitCode=0 Jan 25 00:12:07 crc kubenswrapper[4985]: I0125 00:12:07.958363 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r65ns" event={"ID":"3f7c4664-a09a-4bf7-a974-06d53cd11f51","Type":"ContainerDied","Data":"eac8be476444f25826498a3a816432381a1b92ffc46cf1052d6f7269f546a702"} Jan 25 00:12:07 crc kubenswrapper[4985]: I0125 00:12:07.959646 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r65ns" event={"ID":"3f7c4664-a09a-4bf7-a974-06d53cd11f51","Type":"ContainerStarted","Data":"97d23884779631a814d23345f5d4f8ba58c5acce8fcbbef63c45d497c842b61b"} Jan 25 00:12:08 crc kubenswrapper[4985]: I0125 00:12:08.598622 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-d766v"] Jan 25 00:12:08 crc kubenswrapper[4985]: I0125 00:12:08.599507 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d766v" Jan 25 00:12:08 crc kubenswrapper[4985]: I0125 00:12:08.601712 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 25 00:12:08 crc kubenswrapper[4985]: I0125 00:12:08.615600 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-d766v"] Jan 25 00:12:08 crc kubenswrapper[4985]: I0125 00:12:08.716260 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ba64434-86dd-4e4c-8586-b55306d5162e-utilities\") pod \"redhat-operators-d766v\" (UID: \"2ba64434-86dd-4e4c-8586-b55306d5162e\") " pod="openshift-marketplace/redhat-operators-d766v" Jan 25 00:12:08 crc kubenswrapper[4985]: I0125 00:12:08.716697 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9hwl\" (UniqueName: \"kubernetes.io/projected/2ba64434-86dd-4e4c-8586-b55306d5162e-kube-api-access-q9hwl\") pod \"redhat-operators-d766v\" (UID: \"2ba64434-86dd-4e4c-8586-b55306d5162e\") " pod="openshift-marketplace/redhat-operators-d766v" Jan 25 00:12:08 crc kubenswrapper[4985]: I0125 00:12:08.716742 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ba64434-86dd-4e4c-8586-b55306d5162e-catalog-content\") pod \"redhat-operators-d766v\" (UID: \"2ba64434-86dd-4e4c-8586-b55306d5162e\") " pod="openshift-marketplace/redhat-operators-d766v" Jan 25 00:12:08 crc kubenswrapper[4985]: I0125 00:12:08.817039 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9hwl\" (UniqueName: \"kubernetes.io/projected/2ba64434-86dd-4e4c-8586-b55306d5162e-kube-api-access-q9hwl\") pod \"redhat-operators-d766v\" (UID: \"2ba64434-86dd-4e4c-8586-b55306d5162e\") " pod="openshift-marketplace/redhat-operators-d766v" Jan 25 00:12:08 crc kubenswrapper[4985]: I0125 00:12:08.817076 
4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ba64434-86dd-4e4c-8586-b55306d5162e-catalog-content\") pod \"redhat-operators-d766v\" (UID: \"2ba64434-86dd-4e4c-8586-b55306d5162e\") " pod="openshift-marketplace/redhat-operators-d766v" Jan 25 00:12:08 crc kubenswrapper[4985]: I0125 00:12:08.817147 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ba64434-86dd-4e4c-8586-b55306d5162e-utilities\") pod \"redhat-operators-d766v\" (UID: \"2ba64434-86dd-4e4c-8586-b55306d5162e\") " pod="openshift-marketplace/redhat-operators-d766v" Jan 25 00:12:08 crc kubenswrapper[4985]: I0125 00:12:08.817577 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2ba64434-86dd-4e4c-8586-b55306d5162e-utilities\") pod \"redhat-operators-d766v\" (UID: \"2ba64434-86dd-4e4c-8586-b55306d5162e\") " pod="openshift-marketplace/redhat-operators-d766v" Jan 25 00:12:08 crc kubenswrapper[4985]: I0125 00:12:08.817752 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2ba64434-86dd-4e4c-8586-b55306d5162e-catalog-content\") pod \"redhat-operators-d766v\" (UID: \"2ba64434-86dd-4e4c-8586-b55306d5162e\") " pod="openshift-marketplace/redhat-operators-d766v" Jan 25 00:12:08 crc kubenswrapper[4985]: I0125 00:12:08.837332 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9hwl\" (UniqueName: \"kubernetes.io/projected/2ba64434-86dd-4e4c-8586-b55306d5162e-kube-api-access-q9hwl\") pod \"redhat-operators-d766v\" (UID: \"2ba64434-86dd-4e4c-8586-b55306d5162e\") " pod="openshift-marketplace/redhat-operators-d766v" Jan 25 00:12:08 crc kubenswrapper[4985]: I0125 00:12:08.919233 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d766v" Jan 25 00:12:08 crc kubenswrapper[4985]: I0125 00:12:08.965747 4985 generic.go:334] "Generic (PLEG): container finished" podID="c83724a9-f8b9-4170-993e-55a4801d0b9b" containerID="41124a5af60914a23850d9e344b5ee860b180b7482405ecdc6554a66a3ca624f" exitCode=0 Jan 25 00:12:08 crc kubenswrapper[4985]: I0125 00:12:08.965787 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xpplg" event={"ID":"c83724a9-f8b9-4170-993e-55a4801d0b9b","Type":"ContainerDied","Data":"41124a5af60914a23850d9e344b5ee860b180b7482405ecdc6554a66a3ca624f"} Jan 25 00:12:09 crc kubenswrapper[4985]: I0125 00:12:09.210376 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wm845"] Jan 25 00:12:09 crc kubenswrapper[4985]: I0125 00:12:09.211967 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wm845" Jan 25 00:12:09 crc kubenswrapper[4985]: I0125 00:12:09.214759 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 25 00:12:09 crc kubenswrapper[4985]: I0125 00:12:09.215840 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wm845"] Jan 25 00:12:09 crc kubenswrapper[4985]: I0125 00:12:09.323279 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crbqg\" (UniqueName: \"kubernetes.io/projected/9a68aa2a-5b2f-4564-b0ff-967987869b33-kube-api-access-crbqg\") pod \"redhat-marketplace-wm845\" (UID: \"9a68aa2a-5b2f-4564-b0ff-967987869b33\") " pod="openshift-marketplace/redhat-marketplace-wm845" Jan 25 00:12:09 crc kubenswrapper[4985]: I0125 00:12:09.323332 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a68aa2a-5b2f-4564-b0ff-967987869b33-catalog-content\") pod \"redhat-marketplace-wm845\" (UID: \"9a68aa2a-5b2f-4564-b0ff-967987869b33\") " pod="openshift-marketplace/redhat-marketplace-wm845" Jan 25 00:12:09 crc kubenswrapper[4985]: I0125 00:12:09.323398 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a68aa2a-5b2f-4564-b0ff-967987869b33-utilities\") pod \"redhat-marketplace-wm845\" (UID: \"9a68aa2a-5b2f-4564-b0ff-967987869b33\") " pod="openshift-marketplace/redhat-marketplace-wm845" Jan 25 00:12:09 crc kubenswrapper[4985]: I0125 00:12:09.424494 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a68aa2a-5b2f-4564-b0ff-967987869b33-utilities\") pod \"redhat-marketplace-wm845\" (UID: \"9a68aa2a-5b2f-4564-b0ff-967987869b33\") " pod="openshift-marketplace/redhat-marketplace-wm845" Jan 25 00:12:09 crc kubenswrapper[4985]: I0125 00:12:09.424551 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crbqg\" (UniqueName: \"kubernetes.io/projected/9a68aa2a-5b2f-4564-b0ff-967987869b33-kube-api-access-crbqg\") pod \"redhat-marketplace-wm845\" (UID: \"9a68aa2a-5b2f-4564-b0ff-967987869b33\") " pod="openshift-marketplace/redhat-marketplace-wm845" Jan 25 00:12:09 crc kubenswrapper[4985]: I0125 00:12:09.424572 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a68aa2a-5b2f-4564-b0ff-967987869b33-catalog-content\") pod \"redhat-marketplace-wm845\" (UID: \"9a68aa2a-5b2f-4564-b0ff-967987869b33\") " pod="openshift-marketplace/redhat-marketplace-wm845" Jan 25 00:12:09 crc kubenswrapper[4985]: I0125 00:12:09.425326 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a68aa2a-5b2f-4564-b0ff-967987869b33-catalog-content\") pod \"redhat-marketplace-wm845\" (UID: \"9a68aa2a-5b2f-4564-b0ff-967987869b33\") " pod="openshift-marketplace/redhat-marketplace-wm845" Jan 25 00:12:09 crc kubenswrapper[4985]: I0125 00:12:09.425527 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a68aa2a-5b2f-4564-b0ff-967987869b33-utilities\") pod \"redhat-marketplace-wm845\" (UID: 
\"9a68aa2a-5b2f-4564-b0ff-967987869b33\") " pod="openshift-marketplace/redhat-marketplace-wm845" Jan 25 00:12:09 crc kubenswrapper[4985]: I0125 00:12:09.430927 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-d766v"] Jan 25 00:12:09 crc kubenswrapper[4985]: W0125 00:12:09.439030 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2ba64434_86dd_4e4c_8586_b55306d5162e.slice/crio-e88c059d8a38fecc8807df4fc8e8b230d4a3a3f78cb594643059cab797520742 WatchSource:0}: Error finding container e88c059d8a38fecc8807df4fc8e8b230d4a3a3f78cb594643059cab797520742: Status 404 returned error can't find the container with id e88c059d8a38fecc8807df4fc8e8b230d4a3a3f78cb594643059cab797520742 Jan 25 00:12:09 crc kubenswrapper[4985]: I0125 00:12:09.453874 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crbqg\" (UniqueName: \"kubernetes.io/projected/9a68aa2a-5b2f-4564-b0ff-967987869b33-kube-api-access-crbqg\") pod \"redhat-marketplace-wm845\" (UID: \"9a68aa2a-5b2f-4564-b0ff-967987869b33\") " pod="openshift-marketplace/redhat-marketplace-wm845" Jan 25 00:12:09 crc kubenswrapper[4985]: I0125 00:12:09.976052 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d766v" event={"ID":"2ba64434-86dd-4e4c-8586-b55306d5162e","Type":"ContainerStarted","Data":"e88c059d8a38fecc8807df4fc8e8b230d4a3a3f78cb594643059cab797520742"} Jan 25 00:12:09 crc kubenswrapper[4985]: I0125 00:12:09.976394 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wm845" Jan 25 00:12:10 crc kubenswrapper[4985]: I0125 00:12:10.383474 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wm845"] Jan 25 00:12:10 crc kubenswrapper[4985]: I0125 00:12:10.986056 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xpplg" event={"ID":"c83724a9-f8b9-4170-993e-55a4801d0b9b","Type":"ContainerStarted","Data":"d8edb1443dcfe8697be6ee8d40a6c9506682a6e0a1cb67ad462a21b003dbefd5"} Jan 25 00:12:10 crc kubenswrapper[4985]: I0125 00:12:10.988809 4985 generic.go:334] "Generic (PLEG): container finished" podID="9a68aa2a-5b2f-4564-b0ff-967987869b33" containerID="0d2c13cb9625b31bbe6f1be3eb4b5edd5eeecf50d921350ad1086a6c90b26de4" exitCode=0 Jan 25 00:12:10 crc kubenswrapper[4985]: I0125 00:12:10.988904 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wm845" event={"ID":"9a68aa2a-5b2f-4564-b0ff-967987869b33","Type":"ContainerDied","Data":"0d2c13cb9625b31bbe6f1be3eb4b5edd5eeecf50d921350ad1086a6c90b26de4"} Jan 25 00:12:10 crc kubenswrapper[4985]: I0125 00:12:10.988936 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wm845" event={"ID":"9a68aa2a-5b2f-4564-b0ff-967987869b33","Type":"ContainerStarted","Data":"b4ff5dab6c3cf8242f82510b732dbb08eb2b36009a67d620c35943e6696dba50"} Jan 25 00:12:10 crc kubenswrapper[4985]: I0125 00:12:10.990723 4985 generic.go:334] "Generic (PLEG): container finished" podID="3f7c4664-a09a-4bf7-a974-06d53cd11f51" containerID="b832c4e1e5197951d7b26e93974374dcc32b0e12eae4c6d88ff9eeab13b5ce73" exitCode=0 Jan 25 00:12:10 crc kubenswrapper[4985]: I0125 00:12:10.990835 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r65ns" 
event={"ID":"3f7c4664-a09a-4bf7-a974-06d53cd11f51","Type":"ContainerDied","Data":"b832c4e1e5197951d7b26e93974374dcc32b0e12eae4c6d88ff9eeab13b5ce73"} Jan 25 00:12:11 crc kubenswrapper[4985]: I0125 00:12:11.005865 4985 generic.go:334] "Generic (PLEG): container finished" podID="2ba64434-86dd-4e4c-8586-b55306d5162e" containerID="429772c6b4e6b8baf9df9b77d8ec25014bddf54757106f9e8e08f8e9b5b7ced5" exitCode=0 Jan 25 00:12:11 crc kubenswrapper[4985]: I0125 00:12:11.005934 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d766v" event={"ID":"2ba64434-86dd-4e4c-8586-b55306d5162e","Type":"ContainerDied","Data":"429772c6b4e6b8baf9df9b77d8ec25014bddf54757106f9e8e08f8e9b5b7ced5"} Jan 25 00:12:11 crc kubenswrapper[4985]: I0125 00:12:11.013400 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-xpplg" podStartSLOduration=3.48622639 podStartE2EDuration="6.013375772s" podCreationTimestamp="2026-01-25 00:12:05 +0000 UTC" firstStartedPulling="2026-01-25 00:12:06.948216871 +0000 UTC m=+336.980153144" lastFinishedPulling="2026-01-25 00:12:09.475366253 +0000 UTC m=+339.507302526" observedRunningTime="2026-01-25 00:12:11.01087128 +0000 UTC m=+341.042807553" watchObservedRunningTime="2026-01-25 00:12:11.013375772 +0000 UTC m=+341.045312075" Jan 25 00:12:12 crc kubenswrapper[4985]: I0125 00:12:12.016668 4985 generic.go:334] "Generic (PLEG): container finished" podID="9a68aa2a-5b2f-4564-b0ff-967987869b33" containerID="b8b61bf587ca504d77fed49e2251515a24afcf8399fde7459a1f30020dcb711b" exitCode=0 Jan 25 00:12:12 crc kubenswrapper[4985]: I0125 00:12:12.016771 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wm845" event={"ID":"9a68aa2a-5b2f-4564-b0ff-967987869b33","Type":"ContainerDied","Data":"b8b61bf587ca504d77fed49e2251515a24afcf8399fde7459a1f30020dcb711b"} Jan 25 00:12:12 crc kubenswrapper[4985]: I0125 00:12:12.021358 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d766v" event={"ID":"2ba64434-86dd-4e4c-8586-b55306d5162e","Type":"ContainerStarted","Data":"10117429eb6d1429822aa97905a9c4314c961a1447847354b646d6cc8ed27e36"} Jan 25 00:12:12 crc kubenswrapper[4985]: I0125 00:12:12.028722 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-r65ns" event={"ID":"3f7c4664-a09a-4bf7-a974-06d53cd11f51","Type":"ContainerStarted","Data":"07724df2875295571e19381f7084d25000691d33a4a7fc91fe799928ec9ea79d"} Jan 25 00:12:12 crc kubenswrapper[4985]: I0125 00:12:12.060752 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-r65ns" podStartSLOduration=2.645204241 podStartE2EDuration="6.060731372s" podCreationTimestamp="2026-01-25 00:12:06 +0000 UTC" firstStartedPulling="2026-01-25 00:12:07.959876203 +0000 UTC m=+337.991812506" lastFinishedPulling="2026-01-25 00:12:11.375403364 +0000 UTC m=+341.407339637" observedRunningTime="2026-01-25 00:12:12.058324793 +0000 UTC m=+342.090261086" watchObservedRunningTime="2026-01-25 00:12:12.060731372 +0000 UTC m=+342.092667665" Jan 25 00:12:13 crc kubenswrapper[4985]: I0125 00:12:13.036915 4985 generic.go:334] "Generic (PLEG): container finished" podID="2ba64434-86dd-4e4c-8586-b55306d5162e" containerID="10117429eb6d1429822aa97905a9c4314c961a1447847354b646d6cc8ed27e36" exitCode=0 Jan 25 00:12:13 crc kubenswrapper[4985]: I0125 00:12:13.036990 4985 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d766v" event={"ID":"2ba64434-86dd-4e4c-8586-b55306d5162e","Type":"ContainerDied","Data":"10117429eb6d1429822aa97905a9c4314c961a1447847354b646d6cc8ed27e36"} Jan 25 00:12:13 crc kubenswrapper[4985]: I0125 00:12:13.041254 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wm845" event={"ID":"9a68aa2a-5b2f-4564-b0ff-967987869b33","Type":"ContainerStarted","Data":"7f040f5f1a3648ae30777f435cc7a040c573d3494ad068188d8a61cdedd89fc1"} Jan 25 00:12:13 crc kubenswrapper[4985]: I0125 00:12:13.089489 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wm845" podStartSLOduration=2.640444801 podStartE2EDuration="4.089464516s" podCreationTimestamp="2026-01-25 00:12:09 +0000 UTC" firstStartedPulling="2026-01-25 00:12:10.990724659 +0000 UTC m=+341.022660932" lastFinishedPulling="2026-01-25 00:12:12.439744344 +0000 UTC m=+342.471680647" observedRunningTime="2026-01-25 00:12:13.076616726 +0000 UTC m=+343.108553009" watchObservedRunningTime="2026-01-25 00:12:13.089464516 +0000 UTC m=+343.121400829" Jan 25 00:12:15 crc kubenswrapper[4985]: I0125 00:12:15.056059 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d766v" event={"ID":"2ba64434-86dd-4e4c-8586-b55306d5162e","Type":"ContainerStarted","Data":"9c48432108d421f2de073db13c5394aed906396f65d61099c42d07169ae1e180"} Jan 25 00:12:15 crc kubenswrapper[4985]: I0125 00:12:15.079390 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-d766v" podStartSLOduration=4.128286329 podStartE2EDuration="7.079364786s" podCreationTimestamp="2026-01-25 00:12:08 +0000 UTC" firstStartedPulling="2026-01-25 00:12:11.007298117 +0000 UTC m=+341.039234390" lastFinishedPulling="2026-01-25 00:12:13.958376574 +0000 UTC m=+343.990312847" observedRunningTime="2026-01-25 00:12:15.07147256 +0000 UTC m=+345.103408923" watchObservedRunningTime="2026-01-25 00:12:15.079364786 +0000 UTC m=+345.111301089" Jan 25 00:12:16 crc kubenswrapper[4985]: I0125 00:12:16.344190 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-xpplg" Jan 25 00:12:16 crc kubenswrapper[4985]: I0125 00:12:16.344253 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-xpplg" Jan 25 00:12:16 crc kubenswrapper[4985]: I0125 00:12:16.441796 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-xpplg" Jan 25 00:12:16 crc kubenswrapper[4985]: I0125 00:12:16.941698 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-r65ns" Jan 25 00:12:16 crc kubenswrapper[4985]: I0125 00:12:16.941747 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-r65ns" Jan 25 00:12:16 crc kubenswrapper[4985]: I0125 00:12:16.994422 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-r65ns" Jan 25 00:12:17 crc kubenswrapper[4985]: I0125 00:12:17.130201 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-xpplg" Jan 25 00:12:17 crc kubenswrapper[4985]: I0125 00:12:17.135884 4985 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-r65ns" Jan 25 00:12:18 crc kubenswrapper[4985]: I0125 00:12:18.920202 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-d766v" Jan 25 00:12:18 crc kubenswrapper[4985]: I0125 00:12:18.920246 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-d766v" Jan 25 00:12:19 crc kubenswrapper[4985]: I0125 00:12:19.963668 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-d766v" podUID="2ba64434-86dd-4e4c-8586-b55306d5162e" containerName="registry-server" probeResult="failure" output=< Jan 25 00:12:19 crc kubenswrapper[4985]: timeout: failed to connect service ":50051" within 1s Jan 25 00:12:19 crc kubenswrapper[4985]: > Jan 25 00:12:19 crc kubenswrapper[4985]: I0125 00:12:19.977702 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wm845" Jan 25 00:12:19 crc kubenswrapper[4985]: I0125 00:12:19.977757 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wm845" Jan 25 00:12:20 crc kubenswrapper[4985]: I0125 00:12:20.016723 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wm845" Jan 25 00:12:20 crc kubenswrapper[4985]: I0125 00:12:20.142220 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wm845" Jan 25 00:12:26 crc kubenswrapper[4985]: I0125 00:12:26.953864 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-h2zqn"] Jan 25 00:12:26 crc kubenswrapper[4985]: I0125 00:12:26.955073 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:26 crc kubenswrapper[4985]: I0125 00:12:26.965613 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-h2zqn"] Jan 25 00:12:27 crc kubenswrapper[4985]: I0125 00:12:27.013424 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a50b3fa6-b800-4345-b90e-248baeb97599-registry-tls\") pod \"image-registry-66df7c8f76-h2zqn\" (UID: \"a50b3fa6-b800-4345-b90e-248baeb97599\") " pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:27 crc kubenswrapper[4985]: I0125 00:12:27.013497 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a50b3fa6-b800-4345-b90e-248baeb97599-ca-trust-extracted\") pod \"image-registry-66df7c8f76-h2zqn\" (UID: \"a50b3fa6-b800-4345-b90e-248baeb97599\") " pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:27 crc kubenswrapper[4985]: I0125 00:12:27.013598 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a50b3fa6-b800-4345-b90e-248baeb97599-trusted-ca\") pod \"image-registry-66df7c8f76-h2zqn\" (UID: \"a50b3fa6-b800-4345-b90e-248baeb97599\") " pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:27 crc kubenswrapper[4985]: I0125 00:12:27.013635 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zx8lx\" (UniqueName: \"kubernetes.io/projected/a50b3fa6-b800-4345-b90e-248baeb97599-kube-api-access-zx8lx\") pod \"image-registry-66df7c8f76-h2zqn\" (UID: \"a50b3fa6-b800-4345-b90e-248baeb97599\") " pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:27 crc kubenswrapper[4985]: I0125 00:12:27.013657 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a50b3fa6-b800-4345-b90e-248baeb97599-bound-sa-token\") pod \"image-registry-66df7c8f76-h2zqn\" (UID: \"a50b3fa6-b800-4345-b90e-248baeb97599\") " pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:27 crc kubenswrapper[4985]: I0125 00:12:27.013767 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-h2zqn\" (UID: \"a50b3fa6-b800-4345-b90e-248baeb97599\") " pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:27 crc kubenswrapper[4985]: I0125 00:12:27.013789 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a50b3fa6-b800-4345-b90e-248baeb97599-installation-pull-secrets\") pod \"image-registry-66df7c8f76-h2zqn\" (UID: \"a50b3fa6-b800-4345-b90e-248baeb97599\") " pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:27 crc kubenswrapper[4985]: I0125 00:12:27.013812 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: 
\"kubernetes.io/configmap/a50b3fa6-b800-4345-b90e-248baeb97599-registry-certificates\") pod \"image-registry-66df7c8f76-h2zqn\" (UID: \"a50b3fa6-b800-4345-b90e-248baeb97599\") " pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:27 crc kubenswrapper[4985]: I0125 00:12:27.030517 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-h2zqn\" (UID: \"a50b3fa6-b800-4345-b90e-248baeb97599\") " pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:27 crc kubenswrapper[4985]: I0125 00:12:27.115592 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a50b3fa6-b800-4345-b90e-248baeb97599-installation-pull-secrets\") pod \"image-registry-66df7c8f76-h2zqn\" (UID: \"a50b3fa6-b800-4345-b90e-248baeb97599\") " pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:27 crc kubenswrapper[4985]: I0125 00:12:27.115955 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a50b3fa6-b800-4345-b90e-248baeb97599-registry-certificates\") pod \"image-registry-66df7c8f76-h2zqn\" (UID: \"a50b3fa6-b800-4345-b90e-248baeb97599\") " pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:27 crc kubenswrapper[4985]: I0125 00:12:27.115987 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a50b3fa6-b800-4345-b90e-248baeb97599-registry-tls\") pod \"image-registry-66df7c8f76-h2zqn\" (UID: \"a50b3fa6-b800-4345-b90e-248baeb97599\") " pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:27 crc kubenswrapper[4985]: I0125 00:12:27.116053 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a50b3fa6-b800-4345-b90e-248baeb97599-ca-trust-extracted\") pod \"image-registry-66df7c8f76-h2zqn\" (UID: \"a50b3fa6-b800-4345-b90e-248baeb97599\") " pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:27 crc kubenswrapper[4985]: I0125 00:12:27.116100 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a50b3fa6-b800-4345-b90e-248baeb97599-trusted-ca\") pod \"image-registry-66df7c8f76-h2zqn\" (UID: \"a50b3fa6-b800-4345-b90e-248baeb97599\") " pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:27 crc kubenswrapper[4985]: I0125 00:12:27.116195 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zx8lx\" (UniqueName: \"kubernetes.io/projected/a50b3fa6-b800-4345-b90e-248baeb97599-kube-api-access-zx8lx\") pod \"image-registry-66df7c8f76-h2zqn\" (UID: \"a50b3fa6-b800-4345-b90e-248baeb97599\") " pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:27 crc kubenswrapper[4985]: I0125 00:12:27.116226 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a50b3fa6-b800-4345-b90e-248baeb97599-bound-sa-token\") pod \"image-registry-66df7c8f76-h2zqn\" (UID: \"a50b3fa6-b800-4345-b90e-248baeb97599\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:27 crc kubenswrapper[4985]: I0125 00:12:27.116524 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/a50b3fa6-b800-4345-b90e-248baeb97599-ca-trust-extracted\") pod \"image-registry-66df7c8f76-h2zqn\" (UID: \"a50b3fa6-b800-4345-b90e-248baeb97599\") " pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:27 crc kubenswrapper[4985]: I0125 00:12:27.117438 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/a50b3fa6-b800-4345-b90e-248baeb97599-registry-certificates\") pod \"image-registry-66df7c8f76-h2zqn\" (UID: \"a50b3fa6-b800-4345-b90e-248baeb97599\") " pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:27 crc kubenswrapper[4985]: I0125 00:12:27.118416 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a50b3fa6-b800-4345-b90e-248baeb97599-trusted-ca\") pod \"image-registry-66df7c8f76-h2zqn\" (UID: \"a50b3fa6-b800-4345-b90e-248baeb97599\") " pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:27 crc kubenswrapper[4985]: I0125 00:12:27.121245 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/a50b3fa6-b800-4345-b90e-248baeb97599-registry-tls\") pod \"image-registry-66df7c8f76-h2zqn\" (UID: \"a50b3fa6-b800-4345-b90e-248baeb97599\") " pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:27 crc kubenswrapper[4985]: I0125 00:12:27.121300 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/a50b3fa6-b800-4345-b90e-248baeb97599-installation-pull-secrets\") pod \"image-registry-66df7c8f76-h2zqn\" (UID: \"a50b3fa6-b800-4345-b90e-248baeb97599\") " pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:27 crc kubenswrapper[4985]: I0125 00:12:27.130782 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zx8lx\" (UniqueName: \"kubernetes.io/projected/a50b3fa6-b800-4345-b90e-248baeb97599-kube-api-access-zx8lx\") pod \"image-registry-66df7c8f76-h2zqn\" (UID: \"a50b3fa6-b800-4345-b90e-248baeb97599\") " pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:27 crc kubenswrapper[4985]: I0125 00:12:27.135003 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a50b3fa6-b800-4345-b90e-248baeb97599-bound-sa-token\") pod \"image-registry-66df7c8f76-h2zqn\" (UID: \"a50b3fa6-b800-4345-b90e-248baeb97599\") " pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:27 crc kubenswrapper[4985]: I0125 00:12:27.271590 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:27 crc kubenswrapper[4985]: I0125 00:12:27.757586 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-h2zqn"] Jan 25 00:12:27 crc kubenswrapper[4985]: W0125 00:12:27.768360 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda50b3fa6_b800_4345_b90e_248baeb97599.slice/crio-fb253fcf11507f854d846c1351ad30a2591a922b93746ca6b2d1194304bf6c38 WatchSource:0}: Error finding container fb253fcf11507f854d846c1351ad30a2591a922b93746ca6b2d1194304bf6c38: Status 404 returned error can't find the container with id fb253fcf11507f854d846c1351ad30a2591a922b93746ca6b2d1194304bf6c38 Jan 25 00:12:28 crc kubenswrapper[4985]: I0125 00:12:28.135469 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" event={"ID":"a50b3fa6-b800-4345-b90e-248baeb97599","Type":"ContainerStarted","Data":"fb253fcf11507f854d846c1351ad30a2591a922b93746ca6b2d1194304bf6c38"} Jan 25 00:12:28 crc kubenswrapper[4985]: I0125 00:12:28.964371 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-d766v" Jan 25 00:12:29 crc kubenswrapper[4985]: I0125 00:12:29.004374 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-d766v" Jan 25 00:12:32 crc kubenswrapper[4985]: I0125 00:12:32.162648 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" event={"ID":"a50b3fa6-b800-4345-b90e-248baeb97599","Type":"ContainerStarted","Data":"6bb7aebcb06423508ed9c435292a84a3f344f805dbc701747a93bc9aafe05764"} Jan 25 00:12:32 crc kubenswrapper[4985]: I0125 00:12:32.163144 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:32 crc kubenswrapper[4985]: I0125 00:12:32.197617 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" podStartSLOduration=6.197593092 podStartE2EDuration="6.197593092s" podCreationTimestamp="2026-01-25 00:12:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:12:32.19543587 +0000 UTC m=+362.227372183" watchObservedRunningTime="2026-01-25 00:12:32.197593092 +0000 UTC m=+362.229529405" Jan 25 00:12:32 crc kubenswrapper[4985]: I0125 00:12:32.836219 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-f7fc999fd-9hhhf"] Jan 25 00:12:32 crc kubenswrapper[4985]: I0125 00:12:32.836510 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" podUID="8324e2d0-7f92-4cb7-80cd-ead91932d128" containerName="controller-manager" containerID="cri-o://5e49c26950f10cc35508e0372006e08121988998b908f4c51ec83116889ad1e0" gracePeriod=30 Jan 25 00:12:33 crc kubenswrapper[4985]: I0125 00:12:33.175362 4985 generic.go:334] "Generic (PLEG): container finished" podID="8324e2d0-7f92-4cb7-80cd-ead91932d128" containerID="5e49c26950f10cc35508e0372006e08121988998b908f4c51ec83116889ad1e0" exitCode=0 Jan 25 00:12:33 crc kubenswrapper[4985]: I0125 00:12:33.175444 4985 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" event={"ID":"8324e2d0-7f92-4cb7-80cd-ead91932d128","Type":"ContainerDied","Data":"5e49c26950f10cc35508e0372006e08121988998b908f4c51ec83116889ad1e0"} Jan 25 00:12:33 crc kubenswrapper[4985]: I0125 00:12:33.302415 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" Jan 25 00:12:33 crc kubenswrapper[4985]: I0125 00:12:33.400805 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-szdmg\" (UniqueName: \"kubernetes.io/projected/8324e2d0-7f92-4cb7-80cd-ead91932d128-kube-api-access-szdmg\") pod \"8324e2d0-7f92-4cb7-80cd-ead91932d128\" (UID: \"8324e2d0-7f92-4cb7-80cd-ead91932d128\") " Jan 25 00:12:33 crc kubenswrapper[4985]: I0125 00:12:33.400920 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8324e2d0-7f92-4cb7-80cd-ead91932d128-serving-cert\") pod \"8324e2d0-7f92-4cb7-80cd-ead91932d128\" (UID: \"8324e2d0-7f92-4cb7-80cd-ead91932d128\") " Jan 25 00:12:33 crc kubenswrapper[4985]: I0125 00:12:33.400975 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8324e2d0-7f92-4cb7-80cd-ead91932d128-config\") pod \"8324e2d0-7f92-4cb7-80cd-ead91932d128\" (UID: \"8324e2d0-7f92-4cb7-80cd-ead91932d128\") " Jan 25 00:12:33 crc kubenswrapper[4985]: I0125 00:12:33.401010 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8324e2d0-7f92-4cb7-80cd-ead91932d128-client-ca\") pod \"8324e2d0-7f92-4cb7-80cd-ead91932d128\" (UID: \"8324e2d0-7f92-4cb7-80cd-ead91932d128\") " Jan 25 00:12:33 crc kubenswrapper[4985]: I0125 00:12:33.401052 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8324e2d0-7f92-4cb7-80cd-ead91932d128-proxy-ca-bundles\") pod \"8324e2d0-7f92-4cb7-80cd-ead91932d128\" (UID: \"8324e2d0-7f92-4cb7-80cd-ead91932d128\") " Jan 25 00:12:33 crc kubenswrapper[4985]: I0125 00:12:33.402159 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8324e2d0-7f92-4cb7-80cd-ead91932d128-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "8324e2d0-7f92-4cb7-80cd-ead91932d128" (UID: "8324e2d0-7f92-4cb7-80cd-ead91932d128"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:12:33 crc kubenswrapper[4985]: I0125 00:12:33.402308 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8324e2d0-7f92-4cb7-80cd-ead91932d128-config" (OuterVolumeSpecName: "config") pod "8324e2d0-7f92-4cb7-80cd-ead91932d128" (UID: "8324e2d0-7f92-4cb7-80cd-ead91932d128"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:12:33 crc kubenswrapper[4985]: I0125 00:12:33.402772 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8324e2d0-7f92-4cb7-80cd-ead91932d128-client-ca" (OuterVolumeSpecName: "client-ca") pod "8324e2d0-7f92-4cb7-80cd-ead91932d128" (UID: "8324e2d0-7f92-4cb7-80cd-ead91932d128"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:12:33 crc kubenswrapper[4985]: I0125 00:12:33.407287 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8324e2d0-7f92-4cb7-80cd-ead91932d128-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8324e2d0-7f92-4cb7-80cd-ead91932d128" (UID: "8324e2d0-7f92-4cb7-80cd-ead91932d128"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:12:33 crc kubenswrapper[4985]: I0125 00:12:33.407291 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8324e2d0-7f92-4cb7-80cd-ead91932d128-kube-api-access-szdmg" (OuterVolumeSpecName: "kube-api-access-szdmg") pod "8324e2d0-7f92-4cb7-80cd-ead91932d128" (UID: "8324e2d0-7f92-4cb7-80cd-ead91932d128"). InnerVolumeSpecName "kube-api-access-szdmg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:12:33 crc kubenswrapper[4985]: I0125 00:12:33.503027 4985 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8324e2d0-7f92-4cb7-80cd-ead91932d128-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:12:33 crc kubenswrapper[4985]: I0125 00:12:33.503080 4985 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8324e2d0-7f92-4cb7-80cd-ead91932d128-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:12:33 crc kubenswrapper[4985]: I0125 00:12:33.503098 4985 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8324e2d0-7f92-4cb7-80cd-ead91932d128-client-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:12:33 crc kubenswrapper[4985]: I0125 00:12:33.503149 4985 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/8324e2d0-7f92-4cb7-80cd-ead91932d128-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 25 00:12:33 crc kubenswrapper[4985]: I0125 00:12:33.503175 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-szdmg\" (UniqueName: \"kubernetes.io/projected/8324e2d0-7f92-4cb7-80cd-ead91932d128-kube-api-access-szdmg\") on node \"crc\" DevicePath \"\"" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.185772 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" event={"ID":"8324e2d0-7f92-4cb7-80cd-ead91932d128","Type":"ContainerDied","Data":"cbcfb784f71b6c110fafd3c4f72ec50712986b662081064b6beda30d5f251d1e"} Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.185886 4985 scope.go:117] "RemoveContainer" containerID="5e49c26950f10cc35508e0372006e08121988998b908f4c51ec83116889ad1e0" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.185957 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.197345 4985 patch_prober.go:28] interesting pod/controller-manager-f7fc999fd-9hhhf container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.63:8443/healthz\": dial tcp 10.217.0.63:8443: i/o timeout" start-of-body= Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.197433 4985 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-f7fc999fd-9hhhf" podUID="8324e2d0-7f92-4cb7-80cd-ead91932d128" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.63:8443/healthz\": dial tcp 10.217.0.63:8443: i/o timeout" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.257640 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-f7fc999fd-9hhhf"] Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.265555 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-f7fc999fd-9hhhf"] Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.282669 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8324e2d0-7f92-4cb7-80cd-ead91932d128" path="/var/lib/kubelet/pods/8324e2d0-7f92-4cb7-80cd-ead91932d128/volumes" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.624423 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-59c444b8c5-667sr"] Jan 25 00:12:34 crc kubenswrapper[4985]: E0125 00:12:34.624849 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8324e2d0-7f92-4cb7-80cd-ead91932d128" containerName="controller-manager" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.624892 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="8324e2d0-7f92-4cb7-80cd-ead91932d128" containerName="controller-manager" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.625185 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="8324e2d0-7f92-4cb7-80cd-ead91932d128" containerName="controller-manager" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.626784 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-59c444b8c5-667sr" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.629841 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.630288 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.630487 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.631070 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.631494 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.637408 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.644445 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.647728 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-59c444b8c5-667sr"] Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.823581 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29f911ef-7dc3-457f-ae08-a29e1cea331a-config\") pod \"controller-manager-59c444b8c5-667sr\" (UID: \"29f911ef-7dc3-457f-ae08-a29e1cea331a\") " pod="openshift-controller-manager/controller-manager-59c444b8c5-667sr" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.823716 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/29f911ef-7dc3-457f-ae08-a29e1cea331a-serving-cert\") pod \"controller-manager-59c444b8c5-667sr\" (UID: \"29f911ef-7dc3-457f-ae08-a29e1cea331a\") " pod="openshift-controller-manager/controller-manager-59c444b8c5-667sr" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.823766 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x7qhs\" (UniqueName: \"kubernetes.io/projected/29f911ef-7dc3-457f-ae08-a29e1cea331a-kube-api-access-x7qhs\") pod \"controller-manager-59c444b8c5-667sr\" (UID: \"29f911ef-7dc3-457f-ae08-a29e1cea331a\") " pod="openshift-controller-manager/controller-manager-59c444b8c5-667sr" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.823793 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/29f911ef-7dc3-457f-ae08-a29e1cea331a-proxy-ca-bundles\") pod \"controller-manager-59c444b8c5-667sr\" (UID: \"29f911ef-7dc3-457f-ae08-a29e1cea331a\") " pod="openshift-controller-manager/controller-manager-59c444b8c5-667sr" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.823821 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/29f911ef-7dc3-457f-ae08-a29e1cea331a-client-ca\") pod \"controller-manager-59c444b8c5-667sr\" (UID: \"29f911ef-7dc3-457f-ae08-a29e1cea331a\") " pod="openshift-controller-manager/controller-manager-59c444b8c5-667sr" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.925563 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/29f911ef-7dc3-457f-ae08-a29e1cea331a-serving-cert\") pod \"controller-manager-59c444b8c5-667sr\" (UID: \"29f911ef-7dc3-457f-ae08-a29e1cea331a\") " pod="openshift-controller-manager/controller-manager-59c444b8c5-667sr" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.925667 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x7qhs\" (UniqueName: \"kubernetes.io/projected/29f911ef-7dc3-457f-ae08-a29e1cea331a-kube-api-access-x7qhs\") pod \"controller-manager-59c444b8c5-667sr\" (UID: \"29f911ef-7dc3-457f-ae08-a29e1cea331a\") " pod="openshift-controller-manager/controller-manager-59c444b8c5-667sr" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.925710 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/29f911ef-7dc3-457f-ae08-a29e1cea331a-proxy-ca-bundles\") pod \"controller-manager-59c444b8c5-667sr\" (UID: \"29f911ef-7dc3-457f-ae08-a29e1cea331a\") " pod="openshift-controller-manager/controller-manager-59c444b8c5-667sr" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.925759 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/29f911ef-7dc3-457f-ae08-a29e1cea331a-client-ca\") pod \"controller-manager-59c444b8c5-667sr\" (UID: \"29f911ef-7dc3-457f-ae08-a29e1cea331a\") " pod="openshift-controller-manager/controller-manager-59c444b8c5-667sr" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.925845 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29f911ef-7dc3-457f-ae08-a29e1cea331a-config\") pod \"controller-manager-59c444b8c5-667sr\" (UID: \"29f911ef-7dc3-457f-ae08-a29e1cea331a\") " pod="openshift-controller-manager/controller-manager-59c444b8c5-667sr" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.928889 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/29f911ef-7dc3-457f-ae08-a29e1cea331a-proxy-ca-bundles\") pod \"controller-manager-59c444b8c5-667sr\" (UID: \"29f911ef-7dc3-457f-ae08-a29e1cea331a\") " pod="openshift-controller-manager/controller-manager-59c444b8c5-667sr" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.928892 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/29f911ef-7dc3-457f-ae08-a29e1cea331a-client-ca\") pod \"controller-manager-59c444b8c5-667sr\" (UID: \"29f911ef-7dc3-457f-ae08-a29e1cea331a\") " pod="openshift-controller-manager/controller-manager-59c444b8c5-667sr" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.929437 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29f911ef-7dc3-457f-ae08-a29e1cea331a-config\") pod \"controller-manager-59c444b8c5-667sr\" (UID: \"29f911ef-7dc3-457f-ae08-a29e1cea331a\") " 
pod="openshift-controller-manager/controller-manager-59c444b8c5-667sr" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.930447 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/29f911ef-7dc3-457f-ae08-a29e1cea331a-serving-cert\") pod \"controller-manager-59c444b8c5-667sr\" (UID: \"29f911ef-7dc3-457f-ae08-a29e1cea331a\") " pod="openshift-controller-manager/controller-manager-59c444b8c5-667sr" Jan 25 00:12:34 crc kubenswrapper[4985]: I0125 00:12:34.956345 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x7qhs\" (UniqueName: \"kubernetes.io/projected/29f911ef-7dc3-457f-ae08-a29e1cea331a-kube-api-access-x7qhs\") pod \"controller-manager-59c444b8c5-667sr\" (UID: \"29f911ef-7dc3-457f-ae08-a29e1cea331a\") " pod="openshift-controller-manager/controller-manager-59c444b8c5-667sr" Jan 25 00:12:35 crc kubenswrapper[4985]: I0125 00:12:35.251394 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-59c444b8c5-667sr" Jan 25 00:12:35 crc kubenswrapper[4985]: I0125 00:12:35.566483 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-59c444b8c5-667sr"] Jan 25 00:12:35 crc kubenswrapper[4985]: I0125 00:12:35.835801 4985 patch_prober.go:28] interesting pod/machine-config-daemon-dddxc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 25 00:12:35 crc kubenswrapper[4985]: I0125 00:12:35.836196 4985 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 25 00:12:36 crc kubenswrapper[4985]: I0125 00:12:36.201556 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-59c444b8c5-667sr" event={"ID":"29f911ef-7dc3-457f-ae08-a29e1cea331a","Type":"ContainerStarted","Data":"c14410a403b5f1f9da54b79ca16182f84782e71640ccbaa831def390770c57ec"} Jan 25 00:12:36 crc kubenswrapper[4985]: I0125 00:12:36.201620 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-59c444b8c5-667sr" event={"ID":"29f911ef-7dc3-457f-ae08-a29e1cea331a","Type":"ContainerStarted","Data":"d8f3d5c723f70c9fc195f7ba4fdc0a7a32353de5aaaa3c3873c265939852d03e"} Jan 25 00:12:36 crc kubenswrapper[4985]: I0125 00:12:36.201996 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-59c444b8c5-667sr" Jan 25 00:12:36 crc kubenswrapper[4985]: I0125 00:12:36.207346 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-59c444b8c5-667sr" Jan 25 00:12:36 crc kubenswrapper[4985]: I0125 00:12:36.227041 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-59c444b8c5-667sr" podStartSLOduration=4.227014902 podStartE2EDuration="4.227014902s" podCreationTimestamp="2026-01-25 00:12:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:12:36.222872113 +0000 UTC m=+366.254808456" watchObservedRunningTime="2026-01-25 00:12:36.227014902 +0000 UTC m=+366.258951225" Jan 25 00:12:47 crc kubenswrapper[4985]: I0125 00:12:47.280561 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-h2zqn" Jan 25 00:12:47 crc kubenswrapper[4985]: I0125 00:12:47.349274 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l8stl"] Jan 25 00:12:52 crc kubenswrapper[4985]: I0125 00:12:52.852738 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf"] Jan 25 00:12:52 crc kubenswrapper[4985]: I0125 00:12:52.853673 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf" podUID="8e37f6bb-e178-4724-9320-a4add7248799" containerName="route-controller-manager" containerID="cri-o://74d0f7e4d65923ce1a62d074eaf0907f9f0e193a7a0bfd7a8eb3e1bb0400ba85" gracePeriod=30 Jan 25 00:12:53 crc kubenswrapper[4985]: I0125 00:12:53.303409 4985 generic.go:334] "Generic (PLEG): container finished" podID="8e37f6bb-e178-4724-9320-a4add7248799" containerID="74d0f7e4d65923ce1a62d074eaf0907f9f0e193a7a0bfd7a8eb3e1bb0400ba85" exitCode=0 Jan 25 00:12:53 crc kubenswrapper[4985]: I0125 00:12:53.303449 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf" event={"ID":"8e37f6bb-e178-4724-9320-a4add7248799","Type":"ContainerDied","Data":"74d0f7e4d65923ce1a62d074eaf0907f9f0e193a7a0bfd7a8eb3e1bb0400ba85"} Jan 25 00:12:53 crc kubenswrapper[4985]: I0125 00:12:53.905271 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf" Jan 25 00:12:53 crc kubenswrapper[4985]: I0125 00:12:53.968247 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75f6f8944-mj9tb"] Jan 25 00:12:53 crc kubenswrapper[4985]: E0125 00:12:53.968564 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e37f6bb-e178-4724-9320-a4add7248799" containerName="route-controller-manager" Jan 25 00:12:53 crc kubenswrapper[4985]: I0125 00:12:53.968580 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e37f6bb-e178-4724-9320-a4add7248799" containerName="route-controller-manager" Jan 25 00:12:53 crc kubenswrapper[4985]: I0125 00:12:53.968705 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e37f6bb-e178-4724-9320-a4add7248799" containerName="route-controller-manager" Jan 25 00:12:53 crc kubenswrapper[4985]: I0125 00:12:53.969219 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-mj9tb" Jan 25 00:12:53 crc kubenswrapper[4985]: I0125 00:12:53.973264 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75f6f8944-mj9tb"] Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.000348 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e37f6bb-e178-4724-9320-a4add7248799-serving-cert\") pod \"8e37f6bb-e178-4724-9320-a4add7248799\" (UID: \"8e37f6bb-e178-4724-9320-a4add7248799\") " Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.000447 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wd8dt\" (UniqueName: \"kubernetes.io/projected/8e37f6bb-e178-4724-9320-a4add7248799-kube-api-access-wd8dt\") pod \"8e37f6bb-e178-4724-9320-a4add7248799\" (UID: \"8e37f6bb-e178-4724-9320-a4add7248799\") " Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.000472 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8e37f6bb-e178-4724-9320-a4add7248799-client-ca\") pod \"8e37f6bb-e178-4724-9320-a4add7248799\" (UID: \"8e37f6bb-e178-4724-9320-a4add7248799\") " Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.000545 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e37f6bb-e178-4724-9320-a4add7248799-config\") pod \"8e37f6bb-e178-4724-9320-a4add7248799\" (UID: \"8e37f6bb-e178-4724-9320-a4add7248799\") " Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.001570 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e37f6bb-e178-4724-9320-a4add7248799-config" (OuterVolumeSpecName: "config") pod "8e37f6bb-e178-4724-9320-a4add7248799" (UID: "8e37f6bb-e178-4724-9320-a4add7248799"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.003945 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e37f6bb-e178-4724-9320-a4add7248799-client-ca" (OuterVolumeSpecName: "client-ca") pod "8e37f6bb-e178-4724-9320-a4add7248799" (UID: "8e37f6bb-e178-4724-9320-a4add7248799"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.007900 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e37f6bb-e178-4724-9320-a4add7248799-kube-api-access-wd8dt" (OuterVolumeSpecName: "kube-api-access-wd8dt") pod "8e37f6bb-e178-4724-9320-a4add7248799" (UID: "8e37f6bb-e178-4724-9320-a4add7248799"). InnerVolumeSpecName "kube-api-access-wd8dt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.008321 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e37f6bb-e178-4724-9320-a4add7248799-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8e37f6bb-e178-4724-9320-a4add7248799" (UID: "8e37f6bb-e178-4724-9320-a4add7248799"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.102019 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/44d3535e-6012-4e8f-8d60-f22e737a1ccc-client-ca\") pod \"route-controller-manager-75f6f8944-mj9tb\" (UID: \"44d3535e-6012-4e8f-8d60-f22e737a1ccc\") " pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-mj9tb" Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.102210 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44d3535e-6012-4e8f-8d60-f22e737a1ccc-config\") pod \"route-controller-manager-75f6f8944-mj9tb\" (UID: \"44d3535e-6012-4e8f-8d60-f22e737a1ccc\") " pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-mj9tb" Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.102253 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdswp\" (UniqueName: \"kubernetes.io/projected/44d3535e-6012-4e8f-8d60-f22e737a1ccc-kube-api-access-qdswp\") pod \"route-controller-manager-75f6f8944-mj9tb\" (UID: \"44d3535e-6012-4e8f-8d60-f22e737a1ccc\") " pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-mj9tb" Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.102293 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/44d3535e-6012-4e8f-8d60-f22e737a1ccc-serving-cert\") pod \"route-controller-manager-75f6f8944-mj9tb\" (UID: \"44d3535e-6012-4e8f-8d60-f22e737a1ccc\") " pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-mj9tb" Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.102359 4985 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e37f6bb-e178-4724-9320-a4add7248799-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.102381 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wd8dt\" (UniqueName: \"kubernetes.io/projected/8e37f6bb-e178-4724-9320-a4add7248799-kube-api-access-wd8dt\") on node \"crc\" DevicePath \"\"" Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.102401 4985 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8e37f6bb-e178-4724-9320-a4add7248799-client-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.102419 4985 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8e37f6bb-e178-4724-9320-a4add7248799-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.204492 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44d3535e-6012-4e8f-8d60-f22e737a1ccc-config\") pod \"route-controller-manager-75f6f8944-mj9tb\" (UID: \"44d3535e-6012-4e8f-8d60-f22e737a1ccc\") " pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-mj9tb" Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.205180 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdswp\" (UniqueName: 
\"kubernetes.io/projected/44d3535e-6012-4e8f-8d60-f22e737a1ccc-kube-api-access-qdswp\") pod \"route-controller-manager-75f6f8944-mj9tb\" (UID: \"44d3535e-6012-4e8f-8d60-f22e737a1ccc\") " pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-mj9tb" Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.205450 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/44d3535e-6012-4e8f-8d60-f22e737a1ccc-serving-cert\") pod \"route-controller-manager-75f6f8944-mj9tb\" (UID: \"44d3535e-6012-4e8f-8d60-f22e737a1ccc\") " pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-mj9tb" Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.205930 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/44d3535e-6012-4e8f-8d60-f22e737a1ccc-client-ca\") pod \"route-controller-manager-75f6f8944-mj9tb\" (UID: \"44d3535e-6012-4e8f-8d60-f22e737a1ccc\") " pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-mj9tb" Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.206061 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44d3535e-6012-4e8f-8d60-f22e737a1ccc-config\") pod \"route-controller-manager-75f6f8944-mj9tb\" (UID: \"44d3535e-6012-4e8f-8d60-f22e737a1ccc\") " pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-mj9tb" Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.207373 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/44d3535e-6012-4e8f-8d60-f22e737a1ccc-client-ca\") pod \"route-controller-manager-75f6f8944-mj9tb\" (UID: \"44d3535e-6012-4e8f-8d60-f22e737a1ccc\") " pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-mj9tb" Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.211247 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/44d3535e-6012-4e8f-8d60-f22e737a1ccc-serving-cert\") pod \"route-controller-manager-75f6f8944-mj9tb\" (UID: \"44d3535e-6012-4e8f-8d60-f22e737a1ccc\") " pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-mj9tb" Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.234506 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdswp\" (UniqueName: \"kubernetes.io/projected/44d3535e-6012-4e8f-8d60-f22e737a1ccc-kube-api-access-qdswp\") pod \"route-controller-manager-75f6f8944-mj9tb\" (UID: \"44d3535e-6012-4e8f-8d60-f22e737a1ccc\") " pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-mj9tb" Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.288575 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-mj9tb" Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.313395 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf" event={"ID":"8e37f6bb-e178-4724-9320-a4add7248799","Type":"ContainerDied","Data":"65e005cc09ecc5c19e6cfee133d9a3d1684d357e24135b1f66c841ac732e01d4"} Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.313477 4985 scope.go:117] "RemoveContainer" containerID="74d0f7e4d65923ce1a62d074eaf0907f9f0e193a7a0bfd7a8eb3e1bb0400ba85" Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.313475 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf" Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.370215 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf"] Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.373749 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7b66f58d44-j64wf"] Jan 25 00:12:54 crc kubenswrapper[4985]: I0125 00:12:54.776685 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-75f6f8944-mj9tb"] Jan 25 00:12:54 crc kubenswrapper[4985]: W0125 00:12:54.780139 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod44d3535e_6012_4e8f_8d60_f22e737a1ccc.slice/crio-9ad26ac999c2eb1ae3cfbb8766d84388d93d06bfda9e0b89be5e5814037edae4 WatchSource:0}: Error finding container 9ad26ac999c2eb1ae3cfbb8766d84388d93d06bfda9e0b89be5e5814037edae4: Status 404 returned error can't find the container with id 9ad26ac999c2eb1ae3cfbb8766d84388d93d06bfda9e0b89be5e5814037edae4 Jan 25 00:12:55 crc kubenswrapper[4985]: I0125 00:12:55.322693 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-mj9tb" event={"ID":"44d3535e-6012-4e8f-8d60-f22e737a1ccc","Type":"ContainerStarted","Data":"2bb709f0367ab148a4bd0af3fbea5c0b02333095e8f1583721d9f3881dc38b3d"} Jan 25 00:12:55 crc kubenswrapper[4985]: I0125 00:12:55.323275 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-mj9tb" event={"ID":"44d3535e-6012-4e8f-8d60-f22e737a1ccc","Type":"ContainerStarted","Data":"9ad26ac999c2eb1ae3cfbb8766d84388d93d06bfda9e0b89be5e5814037edae4"} Jan 25 00:12:55 crc kubenswrapper[4985]: I0125 00:12:55.323327 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-mj9tb" Jan 25 00:12:55 crc kubenswrapper[4985]: I0125 00:12:55.333094 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-mj9tb" Jan 25 00:12:55 crc kubenswrapper[4985]: I0125 00:12:55.352579 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-75f6f8944-mj9tb" podStartSLOduration=3.35254759 podStartE2EDuration="3.35254759s" podCreationTimestamp="2026-01-25 00:12:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:12:55.349863192 +0000 UTC m=+385.381799505" watchObservedRunningTime="2026-01-25 00:12:55.35254759 +0000 UTC m=+385.384483903" Jan 25 00:12:56 crc kubenswrapper[4985]: I0125 00:12:56.287359 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e37f6bb-e178-4724-9320-a4add7248799" path="/var/lib/kubelet/pods/8e37f6bb-e178-4724-9320-a4add7248799/volumes" Jan 25 00:13:05 crc kubenswrapper[4985]: I0125 00:13:05.835717 4985 patch_prober.go:28] interesting pod/machine-config-daemon-dddxc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 25 00:13:05 crc kubenswrapper[4985]: I0125 00:13:05.837784 4985 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 25 00:13:12 crc kubenswrapper[4985]: I0125 00:13:12.389343 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" podUID="e3a56e76-0d21-4576-91ec-87099bd8f5e9" containerName="registry" containerID="cri-o://cbc707b1ecdf26c4a61bbfbc3a2d41922a0e46451baa423131add712153c1b80" gracePeriod=30 Jan 25 00:13:12 crc kubenswrapper[4985]: I0125 00:13:12.911562 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.014402 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e3a56e76-0d21-4576-91ec-87099bd8f5e9-ca-trust-extracted\") pod \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.014644 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.014687 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e3a56e76-0d21-4576-91ec-87099bd8f5e9-registry-tls\") pod \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.014745 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e3a56e76-0d21-4576-91ec-87099bd8f5e9-registry-certificates\") pod \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.014776 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e3a56e76-0d21-4576-91ec-87099bd8f5e9-trusted-ca\") pod \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\" (UID: 
\"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.014823 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e3a56e76-0d21-4576-91ec-87099bd8f5e9-bound-sa-token\") pod \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.014857 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xz66m\" (UniqueName: \"kubernetes.io/projected/e3a56e76-0d21-4576-91ec-87099bd8f5e9-kube-api-access-xz66m\") pod \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.014914 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e3a56e76-0d21-4576-91ec-87099bd8f5e9-installation-pull-secrets\") pod \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\" (UID: \"e3a56e76-0d21-4576-91ec-87099bd8f5e9\") " Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.015690 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3a56e76-0d21-4576-91ec-87099bd8f5e9-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "e3a56e76-0d21-4576-91ec-87099bd8f5e9" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.015874 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3a56e76-0d21-4576-91ec-87099bd8f5e9-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "e3a56e76-0d21-4576-91ec-87099bd8f5e9" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.021750 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3a56e76-0d21-4576-91ec-87099bd8f5e9-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "e3a56e76-0d21-4576-91ec-87099bd8f5e9" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.022140 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3a56e76-0d21-4576-91ec-87099bd8f5e9-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "e3a56e76-0d21-4576-91ec-87099bd8f5e9" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.023081 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3a56e76-0d21-4576-91ec-87099bd8f5e9-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "e3a56e76-0d21-4576-91ec-87099bd8f5e9" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.023891 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3a56e76-0d21-4576-91ec-87099bd8f5e9-kube-api-access-xz66m" (OuterVolumeSpecName: "kube-api-access-xz66m") pod "e3a56e76-0d21-4576-91ec-87099bd8f5e9" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9"). InnerVolumeSpecName "kube-api-access-xz66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.026950 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "e3a56e76-0d21-4576-91ec-87099bd8f5e9" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.056663 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e3a56e76-0d21-4576-91ec-87099bd8f5e9-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "e3a56e76-0d21-4576-91ec-87099bd8f5e9" (UID: "e3a56e76-0d21-4576-91ec-87099bd8f5e9"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.117326 4985 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/e3a56e76-0d21-4576-91ec-87099bd8f5e9-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.117424 4985 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e3a56e76-0d21-4576-91ec-87099bd8f5e9-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.117449 4985 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e3a56e76-0d21-4576-91ec-87099bd8f5e9-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.117471 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xz66m\" (UniqueName: \"kubernetes.io/projected/e3a56e76-0d21-4576-91ec-87099bd8f5e9-kube-api-access-xz66m\") on node \"crc\" DevicePath \"\"" Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.117493 4985 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/e3a56e76-0d21-4576-91ec-87099bd8f5e9-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.117512 4985 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/e3a56e76-0d21-4576-91ec-87099bd8f5e9-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.117530 4985 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/e3a56e76-0d21-4576-91ec-87099bd8f5e9-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.447812 4985 generic.go:334] "Generic (PLEG): container finished" podID="e3a56e76-0d21-4576-91ec-87099bd8f5e9" 
containerID="cbc707b1ecdf26c4a61bbfbc3a2d41922a0e46451baa423131add712153c1b80" exitCode=0 Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.447898 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" event={"ID":"e3a56e76-0d21-4576-91ec-87099bd8f5e9","Type":"ContainerDied","Data":"cbc707b1ecdf26c4a61bbfbc3a2d41922a0e46451baa423131add712153c1b80"} Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.447949 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.447983 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-l8stl" event={"ID":"e3a56e76-0d21-4576-91ec-87099bd8f5e9","Type":"ContainerDied","Data":"88ade9b0be0da3fb263b6acd2bfc28adac13ccc5aeb7954b5d52719606cab2b2"} Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.448019 4985 scope.go:117] "RemoveContainer" containerID="cbc707b1ecdf26c4a61bbfbc3a2d41922a0e46451baa423131add712153c1b80" Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.479562 4985 scope.go:117] "RemoveContainer" containerID="cbc707b1ecdf26c4a61bbfbc3a2d41922a0e46451baa423131add712153c1b80" Jan 25 00:13:13 crc kubenswrapper[4985]: E0125 00:13:13.480378 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cbc707b1ecdf26c4a61bbfbc3a2d41922a0e46451baa423131add712153c1b80\": container with ID starting with cbc707b1ecdf26c4a61bbfbc3a2d41922a0e46451baa423131add712153c1b80 not found: ID does not exist" containerID="cbc707b1ecdf26c4a61bbfbc3a2d41922a0e46451baa423131add712153c1b80" Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.480436 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cbc707b1ecdf26c4a61bbfbc3a2d41922a0e46451baa423131add712153c1b80"} err="failed to get container status \"cbc707b1ecdf26c4a61bbfbc3a2d41922a0e46451baa423131add712153c1b80\": rpc error: code = NotFound desc = could not find container \"cbc707b1ecdf26c4a61bbfbc3a2d41922a0e46451baa423131add712153c1b80\": container with ID starting with cbc707b1ecdf26c4a61bbfbc3a2d41922a0e46451baa423131add712153c1b80 not found: ID does not exist" Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.505756 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l8stl"] Jan 25 00:13:13 crc kubenswrapper[4985]: I0125 00:13:13.513605 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-l8stl"] Jan 25 00:13:14 crc kubenswrapper[4985]: I0125 00:13:14.287233 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e3a56e76-0d21-4576-91ec-87099bd8f5e9" path="/var/lib/kubelet/pods/e3a56e76-0d21-4576-91ec-87099bd8f5e9/volumes" Jan 25 00:13:24 crc kubenswrapper[4985]: I0125 00:13:24.310851 4985 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","burstable","pod8e37f6bb-e178-4724-9320-a4add7248799"] err="unable to destroy cgroup paths for cgroup [kubepods burstable pod8e37f6bb-e178-4724-9320-a4add7248799] : Timed out while waiting for systemd to remove kubepods-burstable-pod8e37f6bb_e178_4724_9320_a4add7248799.slice" Jan 25 00:13:35 crc kubenswrapper[4985]: I0125 00:13:35.836280 4985 patch_prober.go:28] interesting 
pod/machine-config-daemon-dddxc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 25 00:13:35 crc kubenswrapper[4985]: I0125 00:13:35.836920 4985 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 25 00:13:35 crc kubenswrapper[4985]: I0125 00:13:35.836981 4985 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" Jan 25 00:13:35 crc kubenswrapper[4985]: I0125 00:13:35.837819 4985 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"da40e82ed6c4f4bb8df94fc89421ac591c8928ddf8db2485dc08f8c949f5f50f"} pod="openshift-machine-config-operator/machine-config-daemon-dddxc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 25 00:13:35 crc kubenswrapper[4985]: I0125 00:13:35.837923 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" containerID="cri-o://da40e82ed6c4f4bb8df94fc89421ac591c8928ddf8db2485dc08f8c949f5f50f" gracePeriod=600 Jan 25 00:13:36 crc kubenswrapper[4985]: I0125 00:13:36.611861 4985 generic.go:334] "Generic (PLEG): container finished" podID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerID="da40e82ed6c4f4bb8df94fc89421ac591c8928ddf8db2485dc08f8c949f5f50f" exitCode=0 Jan 25 00:13:36 crc kubenswrapper[4985]: I0125 00:13:36.611998 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" event={"ID":"5fa83abe-5c61-40a5-bf77-d8f929bdda78","Type":"ContainerDied","Data":"da40e82ed6c4f4bb8df94fc89421ac591c8928ddf8db2485dc08f8c949f5f50f"} Jan 25 00:13:36 crc kubenswrapper[4985]: I0125 00:13:36.612227 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" event={"ID":"5fa83abe-5c61-40a5-bf77-d8f929bdda78","Type":"ContainerStarted","Data":"6720bc1f53fe4d6d06d4cc9c6ab134d539fb2a6884da52dcd36cde67e4f5afdd"} Jan 25 00:13:36 crc kubenswrapper[4985]: I0125 00:13:36.612256 4985 scope.go:117] "RemoveContainer" containerID="03184912c3e41a26c7568427ebf13b617f8737de35b189eb5abd4c39d1b02585" Jan 25 00:15:00 crc kubenswrapper[4985]: I0125 00:15:00.206959 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29488335-f8rjf"] Jan 25 00:15:00 crc kubenswrapper[4985]: E0125 00:15:00.207801 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3a56e76-0d21-4576-91ec-87099bd8f5e9" containerName="registry" Jan 25 00:15:00 crc kubenswrapper[4985]: I0125 00:15:00.207817 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3a56e76-0d21-4576-91ec-87099bd8f5e9" containerName="registry" Jan 25 00:15:00 crc kubenswrapper[4985]: I0125 00:15:00.207937 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3a56e76-0d21-4576-91ec-87099bd8f5e9" 
containerName="registry" Jan 25 00:15:00 crc kubenswrapper[4985]: I0125 00:15:00.208429 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29488335-f8rjf" Jan 25 00:15:00 crc kubenswrapper[4985]: I0125 00:15:00.210889 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 25 00:15:00 crc kubenswrapper[4985]: I0125 00:15:00.211435 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 25 00:15:00 crc kubenswrapper[4985]: I0125 00:15:00.224412 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29488335-f8rjf"] Jan 25 00:15:00 crc kubenswrapper[4985]: I0125 00:15:00.333847 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1d658bad-5773-4f96-a342-0521453eb299-secret-volume\") pod \"collect-profiles-29488335-f8rjf\" (UID: \"1d658bad-5773-4f96-a342-0521453eb299\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488335-f8rjf" Jan 25 00:15:00 crc kubenswrapper[4985]: I0125 00:15:00.333923 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nqmw\" (UniqueName: \"kubernetes.io/projected/1d658bad-5773-4f96-a342-0521453eb299-kube-api-access-6nqmw\") pod \"collect-profiles-29488335-f8rjf\" (UID: \"1d658bad-5773-4f96-a342-0521453eb299\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488335-f8rjf" Jan 25 00:15:00 crc kubenswrapper[4985]: I0125 00:15:00.333996 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1d658bad-5773-4f96-a342-0521453eb299-config-volume\") pod \"collect-profiles-29488335-f8rjf\" (UID: \"1d658bad-5773-4f96-a342-0521453eb299\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488335-f8rjf" Jan 25 00:15:00 crc kubenswrapper[4985]: I0125 00:15:00.435812 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1d658bad-5773-4f96-a342-0521453eb299-secret-volume\") pod \"collect-profiles-29488335-f8rjf\" (UID: \"1d658bad-5773-4f96-a342-0521453eb299\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488335-f8rjf" Jan 25 00:15:00 crc kubenswrapper[4985]: I0125 00:15:00.435905 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nqmw\" (UniqueName: \"kubernetes.io/projected/1d658bad-5773-4f96-a342-0521453eb299-kube-api-access-6nqmw\") pod \"collect-profiles-29488335-f8rjf\" (UID: \"1d658bad-5773-4f96-a342-0521453eb299\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488335-f8rjf" Jan 25 00:15:00 crc kubenswrapper[4985]: I0125 00:15:00.436001 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1d658bad-5773-4f96-a342-0521453eb299-config-volume\") pod \"collect-profiles-29488335-f8rjf\" (UID: \"1d658bad-5773-4f96-a342-0521453eb299\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488335-f8rjf" Jan 25 00:15:00 crc kubenswrapper[4985]: I0125 00:15:00.437810 4985 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1d658bad-5773-4f96-a342-0521453eb299-config-volume\") pod \"collect-profiles-29488335-f8rjf\" (UID: \"1d658bad-5773-4f96-a342-0521453eb299\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488335-f8rjf" Jan 25 00:15:00 crc kubenswrapper[4985]: I0125 00:15:00.444600 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1d658bad-5773-4f96-a342-0521453eb299-secret-volume\") pod \"collect-profiles-29488335-f8rjf\" (UID: \"1d658bad-5773-4f96-a342-0521453eb299\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488335-f8rjf" Jan 25 00:15:00 crc kubenswrapper[4985]: I0125 00:15:00.469739 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nqmw\" (UniqueName: \"kubernetes.io/projected/1d658bad-5773-4f96-a342-0521453eb299-kube-api-access-6nqmw\") pod \"collect-profiles-29488335-f8rjf\" (UID: \"1d658bad-5773-4f96-a342-0521453eb299\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488335-f8rjf" Jan 25 00:15:00 crc kubenswrapper[4985]: I0125 00:15:00.534598 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29488335-f8rjf" Jan 25 00:15:00 crc kubenswrapper[4985]: I0125 00:15:00.804161 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29488335-f8rjf"] Jan 25 00:15:01 crc kubenswrapper[4985]: I0125 00:15:01.164376 4985 generic.go:334] "Generic (PLEG): container finished" podID="1d658bad-5773-4f96-a342-0521453eb299" containerID="8f1c6469e5d2b93d6cbbab1167f6ed2119e5166c54801901cd24ffe4a500c334" exitCode=0 Jan 25 00:15:01 crc kubenswrapper[4985]: I0125 00:15:01.164509 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29488335-f8rjf" event={"ID":"1d658bad-5773-4f96-a342-0521453eb299","Type":"ContainerDied","Data":"8f1c6469e5d2b93d6cbbab1167f6ed2119e5166c54801901cd24ffe4a500c334"} Jan 25 00:15:01 crc kubenswrapper[4985]: I0125 00:15:01.164670 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29488335-f8rjf" event={"ID":"1d658bad-5773-4f96-a342-0521453eb299","Type":"ContainerStarted","Data":"5ce167678c0a92fe069251087ac4c49150483ac8062d25f84aadbb73d9312607"} Jan 25 00:15:02 crc kubenswrapper[4985]: I0125 00:15:02.496012 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29488335-f8rjf" Jan 25 00:15:02 crc kubenswrapper[4985]: I0125 00:15:02.565759 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1d658bad-5773-4f96-a342-0521453eb299-config-volume\") pod \"1d658bad-5773-4f96-a342-0521453eb299\" (UID: \"1d658bad-5773-4f96-a342-0521453eb299\") " Jan 25 00:15:02 crc kubenswrapper[4985]: I0125 00:15:02.566003 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6nqmw\" (UniqueName: \"kubernetes.io/projected/1d658bad-5773-4f96-a342-0521453eb299-kube-api-access-6nqmw\") pod \"1d658bad-5773-4f96-a342-0521453eb299\" (UID: \"1d658bad-5773-4f96-a342-0521453eb299\") " Jan 25 00:15:02 crc kubenswrapper[4985]: I0125 00:15:02.566041 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1d658bad-5773-4f96-a342-0521453eb299-secret-volume\") pod \"1d658bad-5773-4f96-a342-0521453eb299\" (UID: \"1d658bad-5773-4f96-a342-0521453eb299\") " Jan 25 00:15:02 crc kubenswrapper[4985]: I0125 00:15:02.566647 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1d658bad-5773-4f96-a342-0521453eb299-config-volume" (OuterVolumeSpecName: "config-volume") pod "1d658bad-5773-4f96-a342-0521453eb299" (UID: "1d658bad-5773-4f96-a342-0521453eb299"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:15:02 crc kubenswrapper[4985]: I0125 00:15:02.574716 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d658bad-5773-4f96-a342-0521453eb299-kube-api-access-6nqmw" (OuterVolumeSpecName: "kube-api-access-6nqmw") pod "1d658bad-5773-4f96-a342-0521453eb299" (UID: "1d658bad-5773-4f96-a342-0521453eb299"). InnerVolumeSpecName "kube-api-access-6nqmw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:15:02 crc kubenswrapper[4985]: I0125 00:15:02.575348 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1d658bad-5773-4f96-a342-0521453eb299-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "1d658bad-5773-4f96-a342-0521453eb299" (UID: "1d658bad-5773-4f96-a342-0521453eb299"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:15:02 crc kubenswrapper[4985]: I0125 00:15:02.667705 4985 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/1d658bad-5773-4f96-a342-0521453eb299-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 25 00:15:02 crc kubenswrapper[4985]: I0125 00:15:02.667863 4985 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/1d658bad-5773-4f96-a342-0521453eb299-config-volume\") on node \"crc\" DevicePath \"\"" Jan 25 00:15:02 crc kubenswrapper[4985]: I0125 00:15:02.667921 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6nqmw\" (UniqueName: \"kubernetes.io/projected/1d658bad-5773-4f96-a342-0521453eb299-kube-api-access-6nqmw\") on node \"crc\" DevicePath \"\"" Jan 25 00:15:03 crc kubenswrapper[4985]: I0125 00:15:03.183801 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29488335-f8rjf" event={"ID":"1d658bad-5773-4f96-a342-0521453eb299","Type":"ContainerDied","Data":"5ce167678c0a92fe069251087ac4c49150483ac8062d25f84aadbb73d9312607"} Jan 25 00:15:03 crc kubenswrapper[4985]: I0125 00:15:03.183906 4985 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5ce167678c0a92fe069251087ac4c49150483ac8062d25f84aadbb73d9312607" Jan 25 00:15:03 crc kubenswrapper[4985]: I0125 00:15:03.183928 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29488335-f8rjf" Jan 25 00:16:05 crc kubenswrapper[4985]: I0125 00:16:05.836831 4985 patch_prober.go:28] interesting pod/machine-config-daemon-dddxc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 25 00:16:05 crc kubenswrapper[4985]: I0125 00:16:05.837728 4985 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 25 00:16:30 crc kubenswrapper[4985]: I0125 00:16:30.593726 4985 scope.go:117] "RemoveContainer" containerID="fc7b55087a83daae5a0b67ace5ea8c7b18c43d373288f43d5e6c5c0fbc196d8d" Jan 25 00:16:35 crc kubenswrapper[4985]: I0125 00:16:35.836260 4985 patch_prober.go:28] interesting pod/machine-config-daemon-dddxc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 25 00:16:35 crc kubenswrapper[4985]: I0125 00:16:35.836660 4985 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.095338 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-cc28q"] Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.096714 4985 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="nbdb" containerID="cri-o://dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714" gracePeriod=30 Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.096850 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="sbdb" containerID="cri-o://94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb" gracePeriod=30 Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.097213 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff" gracePeriod=30 Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.097263 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="ovn-acl-logging" containerID="cri-o://dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67" gracePeriod=30 Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.097245 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="kube-rbac-proxy-node" containerID="cri-o://2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3" gracePeriod=30 Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.097218 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="northd" containerID="cri-o://eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1" gracePeriod=30 Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.097475 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="ovn-controller" containerID="cri-o://c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398" gracePeriod=30 Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.156221 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="ovnkube-controller" containerID="cri-o://a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057" gracePeriod=30 Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.445300 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-cc28q_64cc3123-ba76-4365-86ae-c4cf7c09a805/ovnkube-controller/3.log" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.447343 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-cc28q_64cc3123-ba76-4365-86ae-c4cf7c09a805/ovn-acl-logging/0.log" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.448185 4985 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-cc28q_64cc3123-ba76-4365-86ae-c4cf7c09a805/ovn-controller/0.log" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.449732 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.518499 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-w2bkq"] Jan 25 00:16:59 crc kubenswrapper[4985]: E0125 00:16:59.518969 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="ovnkube-controller" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.519020 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="ovnkube-controller" Jan 25 00:16:59 crc kubenswrapper[4985]: E0125 00:16:59.519040 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="ovn-acl-logging" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.519054 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="ovn-acl-logging" Jan 25 00:16:59 crc kubenswrapper[4985]: E0125 00:16:59.519074 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="ovn-controller" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.519087 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="ovn-controller" Jan 25 00:16:59 crc kubenswrapper[4985]: E0125 00:16:59.519133 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="kube-rbac-proxy-node" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.519146 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="kube-rbac-proxy-node" Jan 25 00:16:59 crc kubenswrapper[4985]: E0125 00:16:59.519159 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="northd" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.519171 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="northd" Jan 25 00:16:59 crc kubenswrapper[4985]: E0125 00:16:59.519189 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="kubecfg-setup" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.519201 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="kubecfg-setup" Jan 25 00:16:59 crc kubenswrapper[4985]: E0125 00:16:59.519223 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="kube-rbac-proxy-ovn-metrics" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.519235 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="kube-rbac-proxy-ovn-metrics" Jan 25 00:16:59 crc kubenswrapper[4985]: E0125 00:16:59.519254 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d658bad-5773-4f96-a342-0521453eb299" containerName="collect-profiles" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.519269 4985 
state_mem.go:107] "Deleted CPUSet assignment" podUID="1d658bad-5773-4f96-a342-0521453eb299" containerName="collect-profiles" Jan 25 00:16:59 crc kubenswrapper[4985]: E0125 00:16:59.519287 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="sbdb" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.519299 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="sbdb" Jan 25 00:16:59 crc kubenswrapper[4985]: E0125 00:16:59.519317 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="ovnkube-controller" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.519330 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="ovnkube-controller" Jan 25 00:16:59 crc kubenswrapper[4985]: E0125 00:16:59.519353 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="ovnkube-controller" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.519406 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="ovnkube-controller" Jan 25 00:16:59 crc kubenswrapper[4985]: E0125 00:16:59.519423 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="ovnkube-controller" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.519436 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="ovnkube-controller" Jan 25 00:16:59 crc kubenswrapper[4985]: E0125 00:16:59.519452 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="nbdb" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.519510 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="nbdb" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.519681 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="ovnkube-controller" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.519697 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="kube-rbac-proxy-ovn-metrics" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.519715 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="sbdb" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.519732 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="ovnkube-controller" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.519747 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d658bad-5773-4f96-a342-0521453eb299" containerName="collect-profiles" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.519764 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="northd" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.519776 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="ovnkube-controller" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 
00:16:59.519793 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="nbdb" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.519808 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="ovn-acl-logging" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.519825 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="ovn-controller" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.519845 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="kube-rbac-proxy-node" Jan 25 00:16:59 crc kubenswrapper[4985]: E0125 00:16:59.520019 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="ovnkube-controller" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.520033 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="ovnkube-controller" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.520237 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="ovnkube-controller" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.520258 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerName="ovnkube-controller" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.523931 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.650933 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-run-ovn-kubernetes\") pod \"64cc3123-ba76-4365-86ae-c4cf7c09a805\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.650986 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-cni-netd\") pod \"64cc3123-ba76-4365-86ae-c4cf7c09a805\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651043 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pqbqw\" (UniqueName: \"kubernetes.io/projected/64cc3123-ba76-4365-86ae-c4cf7c09a805-kube-api-access-pqbqw\") pod \"64cc3123-ba76-4365-86ae-c4cf7c09a805\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651068 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-etc-openvswitch\") pod \"64cc3123-ba76-4365-86ae-c4cf7c09a805\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651091 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-kubelet\") pod \"64cc3123-ba76-4365-86ae-c4cf7c09a805\" (UID: 
\"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651135 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-node-log\") pod \"64cc3123-ba76-4365-86ae-c4cf7c09a805\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651136 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "64cc3123-ba76-4365-86ae-c4cf7c09a805" (UID: "64cc3123-ba76-4365-86ae-c4cf7c09a805"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651156 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-var-lib-cni-networks-ovn-kubernetes\") pod \"64cc3123-ba76-4365-86ae-c4cf7c09a805\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651210 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "64cc3123-ba76-4365-86ae-c4cf7c09a805" (UID: "64cc3123-ba76-4365-86ae-c4cf7c09a805"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651245 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-systemd-units\") pod \"64cc3123-ba76-4365-86ae-c4cf7c09a805\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651315 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/64cc3123-ba76-4365-86ae-c4cf7c09a805-ovnkube-script-lib\") pod \"64cc3123-ba76-4365-86ae-c4cf7c09a805\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651341 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-run-netns\") pod \"64cc3123-ba76-4365-86ae-c4cf7c09a805\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651361 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-cni-bin\") pod \"64cc3123-ba76-4365-86ae-c4cf7c09a805\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651386 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-var-lib-openvswitch\") pod \"64cc3123-ba76-4365-86ae-c4cf7c09a805\" (UID: 
\"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651438 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/64cc3123-ba76-4365-86ae-c4cf7c09a805-ovnkube-config\") pod \"64cc3123-ba76-4365-86ae-c4cf7c09a805\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651458 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-run-systemd\") pod \"64cc3123-ba76-4365-86ae-c4cf7c09a805\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651489 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-run-ovn\") pod \"64cc3123-ba76-4365-86ae-c4cf7c09a805\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651529 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-log-socket\") pod \"64cc3123-ba76-4365-86ae-c4cf7c09a805\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651556 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/64cc3123-ba76-4365-86ae-c4cf7c09a805-ovn-node-metrics-cert\") pod \"64cc3123-ba76-4365-86ae-c4cf7c09a805\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651583 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-run-openvswitch\") pod \"64cc3123-ba76-4365-86ae-c4cf7c09a805\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651610 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/64cc3123-ba76-4365-86ae-c4cf7c09a805-env-overrides\") pod \"64cc3123-ba76-4365-86ae-c4cf7c09a805\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651667 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-slash\") pod \"64cc3123-ba76-4365-86ae-c4cf7c09a805\" (UID: \"64cc3123-ba76-4365-86ae-c4cf7c09a805\") " Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651250 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "64cc3123-ba76-4365-86ae-c4cf7c09a805" (UID: "64cc3123-ba76-4365-86ae-c4cf7c09a805"). InnerVolumeSpecName "host-kubelet". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651248 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "64cc3123-ba76-4365-86ae-c4cf7c09a805" (UID: "64cc3123-ba76-4365-86ae-c4cf7c09a805"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651270 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "64cc3123-ba76-4365-86ae-c4cf7c09a805" (UID: "64cc3123-ba76-4365-86ae-c4cf7c09a805"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651269 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-node-log" (OuterVolumeSpecName: "node-log") pod "64cc3123-ba76-4365-86ae-c4cf7c09a805" (UID: "64cc3123-ba76-4365-86ae-c4cf7c09a805"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651293 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "64cc3123-ba76-4365-86ae-c4cf7c09a805" (UID: "64cc3123-ba76-4365-86ae-c4cf7c09a805"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651767 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64cc3123-ba76-4365-86ae-c4cf7c09a805-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "64cc3123-ba76-4365-86ae-c4cf7c09a805" (UID: "64cc3123-ba76-4365-86ae-c4cf7c09a805"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651924 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-node-log\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.651965 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-host-run-ovn-kubernetes\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652000 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-host-cni-netd\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652025 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-run-ovn\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652062 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-etc-openvswitch\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652163 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/1a7b986b-eac4-4529-9467-9d524749d946-ovnkube-script-lib\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652197 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-var-lib-openvswitch\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652231 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1a7b986b-eac4-4529-9467-9d524749d946-ovnkube-config\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652257 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: 
\"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-systemd-units\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652299 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-host-kubelet\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652303 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "64cc3123-ba76-4365-86ae-c4cf7c09a805" (UID: "64cc3123-ba76-4365-86ae-c4cf7c09a805"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652327 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-host-run-netns\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652340 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-log-socket" (OuterVolumeSpecName: "log-socket") pod "64cc3123-ba76-4365-86ae-c4cf7c09a805" (UID: "64cc3123-ba76-4365-86ae-c4cf7c09a805"). InnerVolumeSpecName "log-socket". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652354 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-host-cni-bin\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652386 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1a7b986b-eac4-4529-9467-9d524749d946-ovn-node-metrics-cert\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652411 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1a7b986b-eac4-4529-9467-9d524749d946-env-overrides\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652438 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pg9mn\" (UniqueName: \"kubernetes.io/projected/1a7b986b-eac4-4529-9467-9d524749d946-kube-api-access-pg9mn\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652464 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-host-slash\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652487 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-run-openvswitch\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652509 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-log-socket\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652535 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-run-systemd\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652566 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652533 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "64cc3123-ba76-4365-86ae-c4cf7c09a805" (UID: "64cc3123-ba76-4365-86ae-c4cf7c09a805"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652755 4985 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-run-ovn\") on node \"crc\" DevicePath \"\"" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652772 4985 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-log-socket\") on node \"crc\" DevicePath \"\"" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652785 4985 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652865 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64cc3123-ba76-4365-86ae-c4cf7c09a805-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "64cc3123-ba76-4365-86ae-c4cf7c09a805" (UID: "64cc3123-ba76-4365-86ae-c4cf7c09a805"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652886 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "64cc3123-ba76-4365-86ae-c4cf7c09a805" (UID: "64cc3123-ba76-4365-86ae-c4cf7c09a805"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652914 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "64cc3123-ba76-4365-86ae-c4cf7c09a805" (UID: "64cc3123-ba76-4365-86ae-c4cf7c09a805"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652951 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-slash" (OuterVolumeSpecName: "host-slash") pod "64cc3123-ba76-4365-86ae-c4cf7c09a805" (UID: "64cc3123-ba76-4365-86ae-c4cf7c09a805"). InnerVolumeSpecName "host-slash". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.652980 4985 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-cni-netd\") on node \"crc\" DevicePath \"\"" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.653014 4985 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.653035 4985 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-kubelet\") on node \"crc\" DevicePath \"\"" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.653055 4985 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-node-log\") on node \"crc\" DevicePath \"\"" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.653184 4985 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.653203 4985 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-systemd-units\") on node \"crc\" DevicePath \"\"" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.653217 4985 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/64cc3123-ba76-4365-86ae-c4cf7c09a805-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.653195 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "64cc3123-ba76-4365-86ae-c4cf7c09a805" (UID: "64cc3123-ba76-4365-86ae-c4cf7c09a805"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.653322 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/64cc3123-ba76-4365-86ae-c4cf7c09a805-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "64cc3123-ba76-4365-86ae-c4cf7c09a805" (UID: "64cc3123-ba76-4365-86ae-c4cf7c09a805"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.656933 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64cc3123-ba76-4365-86ae-c4cf7c09a805-kube-api-access-pqbqw" (OuterVolumeSpecName: "kube-api-access-pqbqw") pod "64cc3123-ba76-4365-86ae-c4cf7c09a805" (UID: "64cc3123-ba76-4365-86ae-c4cf7c09a805"). InnerVolumeSpecName "kube-api-access-pqbqw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.657338 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/64cc3123-ba76-4365-86ae-c4cf7c09a805-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "64cc3123-ba76-4365-86ae-c4cf7c09a805" (UID: "64cc3123-ba76-4365-86ae-c4cf7c09a805"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.663971 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "64cc3123-ba76-4365-86ae-c4cf7c09a805" (UID: "64cc3123-ba76-4365-86ae-c4cf7c09a805"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.753785 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-node-log\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.753870 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-host-run-ovn-kubernetes\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.753922 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-host-cni-netd\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.753969 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-run-ovn\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754023 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-etc-openvswitch\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754078 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/1a7b986b-eac4-4529-9467-9d524749d946-ovnkube-script-lib\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754174 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-var-lib-openvswitch\") pod \"ovnkube-node-w2bkq\" (UID: 
\"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754229 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1a7b986b-eac4-4529-9467-9d524749d946-ovnkube-config\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754273 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-systemd-units\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754341 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-host-kubelet\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754388 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-host-run-netns\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754437 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-host-cni-bin\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754491 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1a7b986b-eac4-4529-9467-9d524749d946-ovn-node-metrics-cert\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754533 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1a7b986b-eac4-4529-9467-9d524749d946-env-overrides\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754583 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-host-slash\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754626 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-run-openvswitch\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 
00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754669 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pg9mn\" (UniqueName: \"kubernetes.io/projected/1a7b986b-eac4-4529-9467-9d524749d946-kube-api-access-pg9mn\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754709 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-node-log\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754712 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-log-socket\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754770 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-log-socket\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754794 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-run-systemd\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754777 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-run-systemd\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754843 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-host-run-ovn-kubernetes\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754849 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754877 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-host-cni-netd\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754927 4985 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754943 4985 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/64cc3123-ba76-4365-86ae-c4cf7c09a805-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754955 4985 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-run-systemd\") on node \"crc\" DevicePath \"\"" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754964 4985 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/64cc3123-ba76-4365-86ae-c4cf7c09a805-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754976 4985 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-run-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.754992 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-var-lib-openvswitch\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.755017 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-run-ovn\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.755000 4985 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/64cc3123-ba76-4365-86ae-c4cf7c09a805-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.755036 4985 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-slash\") on node \"crc\" DevicePath \"\"" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.755046 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pqbqw\" (UniqueName: \"kubernetes.io/projected/64cc3123-ba76-4365-86ae-c4cf7c09a805-kube-api-access-pqbqw\") on node \"crc\" DevicePath \"\"" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.755055 4985 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-run-netns\") on node \"crc\" DevicePath \"\"" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.755077 4985 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-host-cni-bin\") on node \"crc\" DevicePath \"\"" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 
00:16:59.755085 4985 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/64cc3123-ba76-4365-86ae-c4cf7c09a805-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.755127 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-etc-openvswitch\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.755354 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/1a7b986b-eac4-4529-9467-9d524749d946-ovnkube-script-lib\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.755417 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-host-slash\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.755868 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/1a7b986b-eac4-4529-9467-9d524749d946-env-overrides\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.755961 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/1a7b986b-eac4-4529-9467-9d524749d946-ovnkube-config\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.755984 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-host-kubelet\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.756043 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-systemd-units\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.756067 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-run-openvswitch\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.756088 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-host-run-netns\") pod \"ovnkube-node-w2bkq\" (UID: 
\"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.756126 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/1a7b986b-eac4-4529-9467-9d524749d946-host-cni-bin\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.759616 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/1a7b986b-eac4-4529-9467-9d524749d946-ovn-node-metrics-cert\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.775082 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pg9mn\" (UniqueName: \"kubernetes.io/projected/1a7b986b-eac4-4529-9467-9d524749d946-kube-api-access-pg9mn\") pod \"ovnkube-node-w2bkq\" (UID: \"1a7b986b-eac4-4529-9467-9d524749d946\") " pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.841816 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.958944 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4w9l7_0294dfed-64df-4d3c-92de-7a93787780a2/kube-multus/2.log" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.959645 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4w9l7_0294dfed-64df-4d3c-92de-7a93787780a2/kube-multus/1.log" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.959693 4985 generic.go:334] "Generic (PLEG): container finished" podID="0294dfed-64df-4d3c-92de-7a93787780a2" containerID="70c28b2abf2e70c814a8c5ac83f93a3a6935c31e2d3b6f254ff98456404cb38b" exitCode=2 Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.959748 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4w9l7" event={"ID":"0294dfed-64df-4d3c-92de-7a93787780a2","Type":"ContainerDied","Data":"70c28b2abf2e70c814a8c5ac83f93a3a6935c31e2d3b6f254ff98456404cb38b"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.959779 4985 scope.go:117] "RemoveContainer" containerID="37f704152429d54471e85318a5e83ab5aa842441c44fc4e6615bb50d8fe2b03e" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.960638 4985 scope.go:117] "RemoveContainer" containerID="70c28b2abf2e70c814a8c5ac83f93a3a6935c31e2d3b6f254ff98456404cb38b" Jan 25 00:16:59 crc kubenswrapper[4985]: E0125 00:16:59.961035 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-4w9l7_openshift-multus(0294dfed-64df-4d3c-92de-7a93787780a2)\"" pod="openshift-multus/multus-4w9l7" podUID="0294dfed-64df-4d3c-92de-7a93787780a2" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.963510 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-cc28q_64cc3123-ba76-4365-86ae-c4cf7c09a805/ovnkube-controller/3.log" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.967226 4985 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-cc28q_64cc3123-ba76-4365-86ae-c4cf7c09a805/ovn-acl-logging/0.log" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968138 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-cc28q_64cc3123-ba76-4365-86ae-c4cf7c09a805/ovn-controller/0.log" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968658 4985 generic.go:334] "Generic (PLEG): container finished" podID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerID="a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057" exitCode=0 Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968684 4985 generic.go:334] "Generic (PLEG): container finished" podID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerID="94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb" exitCode=0 Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968694 4985 generic.go:334] "Generic (PLEG): container finished" podID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerID="dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714" exitCode=0 Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968705 4985 generic.go:334] "Generic (PLEG): container finished" podID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerID="eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1" exitCode=0 Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968713 4985 generic.go:334] "Generic (PLEG): container finished" podID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerID="6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff" exitCode=0 Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968721 4985 generic.go:334] "Generic (PLEG): container finished" podID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerID="2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3" exitCode=0 Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968727 4985 generic.go:334] "Generic (PLEG): container finished" podID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerID="dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67" exitCode=143 Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968735 4985 generic.go:334] "Generic (PLEG): container finished" podID="64cc3123-ba76-4365-86ae-c4cf7c09a805" containerID="c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398" exitCode=143 Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968768 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerDied","Data":"a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968788 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerDied","Data":"94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968799 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerDied","Data":"dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968808 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" 
event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerDied","Data":"eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968819 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerDied","Data":"6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968828 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerDied","Data":"2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968838 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968847 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968853 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968858 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968864 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968870 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968874 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968879 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968884 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968889 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968895 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" 
event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerDied","Data":"dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968902 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968908 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968913 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968917 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968922 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968927 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968932 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968937 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968942 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968946 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968954 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerDied","Data":"c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968964 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968970 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.968976 4985 pod_container_deletor.go:114] 
"Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.969012 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.969017 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.969022 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.969027 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.969038 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.969043 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.969049 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.969056 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" event={"ID":"64cc3123-ba76-4365-86ae-c4cf7c09a805","Type":"ContainerDied","Data":"a3105f031b5159130a4c5d5b1210ece101c40faf3453dd470fc723525955aa14"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.969063 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.969070 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.969075 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.969080 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.969085 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.969091 4985 pod_container_deletor.go:114] 
"Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.969095 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.969102 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.969122 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.969127 4985 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d"} Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.969206 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-cc28q" Jan 25 00:16:59 crc kubenswrapper[4985]: I0125 00:16:59.973911 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" event={"ID":"1a7b986b-eac4-4529-9467-9d524749d946","Type":"ContainerStarted","Data":"6d0347ad3aae10f761a2cd204dbfee6bd578c7eb7c3b91a7eb8f1811ad4a7bdc"} Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.025801 4985 scope.go:117] "RemoveContainer" containerID="a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.033143 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-cc28q"] Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.033827 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-cc28q"] Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.067938 4985 scope.go:117] "RemoveContainer" containerID="cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.086939 4985 scope.go:117] "RemoveContainer" containerID="94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.099953 4985 scope.go:117] "RemoveContainer" containerID="dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.112961 4985 scope.go:117] "RemoveContainer" containerID="eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.125998 4985 scope.go:117] "RemoveContainer" containerID="6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.137802 4985 scope.go:117] "RemoveContainer" containerID="2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.150356 4985 scope.go:117] "RemoveContainer" containerID="dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.162891 4985 scope.go:117] "RemoveContainer" 
containerID="c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.176818 4985 scope.go:117] "RemoveContainer" containerID="7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.189206 4985 scope.go:117] "RemoveContainer" containerID="a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057" Jan 25 00:17:00 crc kubenswrapper[4985]: E0125 00:17:00.189580 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057\": container with ID starting with a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057 not found: ID does not exist" containerID="a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.189621 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057"} err="failed to get container status \"a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057\": rpc error: code = NotFound desc = could not find container \"a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057\": container with ID starting with a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057 not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.189650 4985 scope.go:117] "RemoveContainer" containerID="cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c" Jan 25 00:17:00 crc kubenswrapper[4985]: E0125 00:17:00.189985 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c\": container with ID starting with cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c not found: ID does not exist" containerID="cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.190122 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c"} err="failed to get container status \"cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c\": rpc error: code = NotFound desc = could not find container \"cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c\": container with ID starting with cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.190238 4985 scope.go:117] "RemoveContainer" containerID="94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb" Jan 25 00:17:00 crc kubenswrapper[4985]: E0125 00:17:00.190614 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\": container with ID starting with 94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb not found: ID does not exist" containerID="94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.190647 4985 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb"} err="failed to get container status \"94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\": rpc error: code = NotFound desc = could not find container \"94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\": container with ID starting with 94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.190671 4985 scope.go:117] "RemoveContainer" containerID="dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714" Jan 25 00:17:00 crc kubenswrapper[4985]: E0125 00:17:00.190968 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\": container with ID starting with dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714 not found: ID does not exist" containerID="dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.191081 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714"} err="failed to get container status \"dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\": rpc error: code = NotFound desc = could not find container \"dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\": container with ID starting with dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714 not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.191192 4985 scope.go:117] "RemoveContainer" containerID="eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1" Jan 25 00:17:00 crc kubenswrapper[4985]: E0125 00:17:00.191571 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\": container with ID starting with eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1 not found: ID does not exist" containerID="eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.191697 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1"} err="failed to get container status \"eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\": rpc error: code = NotFound desc = could not find container \"eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\": container with ID starting with eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1 not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.191804 4985 scope.go:117] "RemoveContainer" containerID="6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff" Jan 25 00:17:00 crc kubenswrapper[4985]: E0125 00:17:00.192255 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\": container with ID starting with 6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff not found: ID does not exist" 
containerID="6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.192399 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff"} err="failed to get container status \"6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\": rpc error: code = NotFound desc = could not find container \"6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\": container with ID starting with 6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.192505 4985 scope.go:117] "RemoveContainer" containerID="2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3" Jan 25 00:17:00 crc kubenswrapper[4985]: E0125 00:17:00.193126 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\": container with ID starting with 2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3 not found: ID does not exist" containerID="2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.193270 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3"} err="failed to get container status \"2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\": rpc error: code = NotFound desc = could not find container \"2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\": container with ID starting with 2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3 not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.193376 4985 scope.go:117] "RemoveContainer" containerID="dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67" Jan 25 00:17:00 crc kubenswrapper[4985]: E0125 00:17:00.193805 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\": container with ID starting with dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67 not found: ID does not exist" containerID="dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.193839 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67"} err="failed to get container status \"dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\": rpc error: code = NotFound desc = could not find container \"dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\": container with ID starting with dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67 not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.193863 4985 scope.go:117] "RemoveContainer" containerID="c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398" Jan 25 00:17:00 crc kubenswrapper[4985]: E0125 00:17:00.194226 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\": container with ID starting with c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398 not found: ID does not exist" containerID="c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.194294 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398"} err="failed to get container status \"c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\": rpc error: code = NotFound desc = could not find container \"c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\": container with ID starting with c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398 not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.194336 4985 scope.go:117] "RemoveContainer" containerID="7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d" Jan 25 00:17:00 crc kubenswrapper[4985]: E0125 00:17:00.194707 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\": container with ID starting with 7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d not found: ID does not exist" containerID="7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.194837 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d"} err="failed to get container status \"7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\": rpc error: code = NotFound desc = could not find container \"7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\": container with ID starting with 7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.194942 4985 scope.go:117] "RemoveContainer" containerID="a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.195421 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057"} err="failed to get container status \"a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057\": rpc error: code = NotFound desc = could not find container \"a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057\": container with ID starting with a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057 not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.195453 4985 scope.go:117] "RemoveContainer" containerID="cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.195760 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c"} err="failed to get container status \"cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c\": rpc error: code = NotFound desc = could not find container \"cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c\": container with ID starting with 
cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.195877 4985 scope.go:117] "RemoveContainer" containerID="94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.196317 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb"} err="failed to get container status \"94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\": rpc error: code = NotFound desc = could not find container \"94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\": container with ID starting with 94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.196344 4985 scope.go:117] "RemoveContainer" containerID="dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.196764 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714"} err="failed to get container status \"dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\": rpc error: code = NotFound desc = could not find container \"dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\": container with ID starting with dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714 not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.196786 4985 scope.go:117] "RemoveContainer" containerID="eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.197129 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1"} err="failed to get container status \"eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\": rpc error: code = NotFound desc = could not find container \"eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\": container with ID starting with eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1 not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.197160 4985 scope.go:117] "RemoveContainer" containerID="6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.197463 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff"} err="failed to get container status \"6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\": rpc error: code = NotFound desc = could not find container \"6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\": container with ID starting with 6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.197575 4985 scope.go:117] "RemoveContainer" containerID="2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.197984 4985 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3"} err="failed to get container status \"2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\": rpc error: code = NotFound desc = could not find container \"2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\": container with ID starting with 2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3 not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.198250 4985 scope.go:117] "RemoveContainer" containerID="dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.198674 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67"} err="failed to get container status \"dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\": rpc error: code = NotFound desc = could not find container \"dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\": container with ID starting with dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67 not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.198792 4985 scope.go:117] "RemoveContainer" containerID="c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.199138 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398"} err="failed to get container status \"c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\": rpc error: code = NotFound desc = could not find container \"c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\": container with ID starting with c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398 not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.199160 4985 scope.go:117] "RemoveContainer" containerID="7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.199470 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d"} err="failed to get container status \"7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\": rpc error: code = NotFound desc = could not find container \"7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\": container with ID starting with 7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.199583 4985 scope.go:117] "RemoveContainer" containerID="a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.199888 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057"} err="failed to get container status \"a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057\": rpc error: code = NotFound desc = could not find container \"a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057\": container with ID starting with a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057 not found: ID does not exist" Jan 
25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.199910 4985 scope.go:117] "RemoveContainer" containerID="cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.200262 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c"} err="failed to get container status \"cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c\": rpc error: code = NotFound desc = could not find container \"cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c\": container with ID starting with cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.200389 4985 scope.go:117] "RemoveContainer" containerID="94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.200763 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb"} err="failed to get container status \"94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\": rpc error: code = NotFound desc = could not find container \"94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\": container with ID starting with 94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.200882 4985 scope.go:117] "RemoveContainer" containerID="dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.201225 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714"} err="failed to get container status \"dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\": rpc error: code = NotFound desc = could not find container \"dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\": container with ID starting with dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714 not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.201248 4985 scope.go:117] "RemoveContainer" containerID="eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.201523 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1"} err="failed to get container status \"eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\": rpc error: code = NotFound desc = could not find container \"eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\": container with ID starting with eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1 not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.201547 4985 scope.go:117] "RemoveContainer" containerID="6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.201823 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff"} err="failed to get container status 
\"6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\": rpc error: code = NotFound desc = could not find container \"6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\": container with ID starting with 6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.201933 4985 scope.go:117] "RemoveContainer" containerID="2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.202315 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3"} err="failed to get container status \"2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\": rpc error: code = NotFound desc = could not find container \"2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\": container with ID starting with 2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3 not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.202449 4985 scope.go:117] "RemoveContainer" containerID="dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.202745 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67"} err="failed to get container status \"dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\": rpc error: code = NotFound desc = could not find container \"dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\": container with ID starting with dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67 not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.202768 4985 scope.go:117] "RemoveContainer" containerID="c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.203098 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398"} err="failed to get container status \"c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\": rpc error: code = NotFound desc = could not find container \"c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\": container with ID starting with c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398 not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.203226 4985 scope.go:117] "RemoveContainer" containerID="7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.203622 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d"} err="failed to get container status \"7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\": rpc error: code = NotFound desc = could not find container \"7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\": container with ID starting with 7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.203753 4985 scope.go:117] "RemoveContainer" 
containerID="a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.204054 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057"} err="failed to get container status \"a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057\": rpc error: code = NotFound desc = could not find container \"a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057\": container with ID starting with a0c585b8afbb35745a52dde210b630165db629747ea8c03670bb97fd0ee4d057 not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.204079 4985 scope.go:117] "RemoveContainer" containerID="cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.204351 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c"} err="failed to get container status \"cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c\": rpc error: code = NotFound desc = could not find container \"cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c\": container with ID starting with cb951b5e71384071465a2286ef97dae8bff0aacc637cb2b8818787033722b80c not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.204372 4985 scope.go:117] "RemoveContainer" containerID="94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.204560 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb"} err="failed to get container status \"94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\": rpc error: code = NotFound desc = could not find container \"94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb\": container with ID starting with 94c132ee238e6d768a85ef36dd9930152c0ac94c147a4067a44edc2554cddcfb not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.204584 4985 scope.go:117] "RemoveContainer" containerID="dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.204864 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714"} err="failed to get container status \"dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\": rpc error: code = NotFound desc = could not find container \"dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714\": container with ID starting with dd812c5124d278b3c97247db9561661566fd06d650600efaa81e9e036c0a0714 not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.204909 4985 scope.go:117] "RemoveContainer" containerID="eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.205180 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1"} err="failed to get container status \"eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\": rpc error: code = NotFound desc = could not find 
container \"eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1\": container with ID starting with eb6fe550b8384f2ab9b1e8e843352db0c71e99ddea78d0ab6538913c157ba0b1 not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.205341 4985 scope.go:117] "RemoveContainer" containerID="6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.205665 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff"} err="failed to get container status \"6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\": rpc error: code = NotFound desc = could not find container \"6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff\": container with ID starting with 6c3b9403fd8f0b41ffb5dec39daf4c255058624d43d22b77205a7701fcf5f0ff not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.205689 4985 scope.go:117] "RemoveContainer" containerID="2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.206276 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3"} err="failed to get container status \"2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\": rpc error: code = NotFound desc = could not find container \"2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3\": container with ID starting with 2e8a9236595820f7a080bca58f5975b150d43426733cb54bd22673ad2870b0b3 not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.206315 4985 scope.go:117] "RemoveContainer" containerID="dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.206565 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67"} err="failed to get container status \"dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\": rpc error: code = NotFound desc = could not find container \"dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67\": container with ID starting with dc06627f9904a303db078ee42f52d314109b3e26767c7d0b9fc2a37a5ad6bf67 not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.206589 4985 scope.go:117] "RemoveContainer" containerID="c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.206917 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398"} err="failed to get container status \"c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\": rpc error: code = NotFound desc = could not find container \"c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398\": container with ID starting with c2e153fc57af7a17e1ee127831c8bf0e6756e7630ca4226fb6b076518ccb6398 not found: ID does not exist" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.206952 4985 scope.go:117] "RemoveContainer" containerID="7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d" Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.207302 4985 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d"} err="failed to get container status \"7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\": rpc error: code = NotFound desc = could not find container \"7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d\": container with ID starting with 7076099636c0f9b8014769b7bb74cd89c3e1a77617177d515cfcb7bb763eed8d not found: ID does not exist"
Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.281257 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64cc3123-ba76-4365-86ae-c4cf7c09a805" path="/var/lib/kubelet/pods/64cc3123-ba76-4365-86ae-c4cf7c09a805/volumes"
Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.981512 4985 generic.go:334] "Generic (PLEG): container finished" podID="1a7b986b-eac4-4529-9467-9d524749d946" containerID="980b1297ed01200b79c956a32ca1298aa40df3fb3f7084714076a63027123723" exitCode=0
Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.981608 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" event={"ID":"1a7b986b-eac4-4529-9467-9d524749d946","Type":"ContainerDied","Data":"980b1297ed01200b79c956a32ca1298aa40df3fb3f7084714076a63027123723"}
Jan 25 00:17:00 crc kubenswrapper[4985]: I0125 00:17:00.984965 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4w9l7_0294dfed-64df-4d3c-92de-7a93787780a2/kube-multus/2.log"
Jan 25 00:17:02 crc kubenswrapper[4985]: I0125 00:17:02.003514 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" event={"ID":"1a7b986b-eac4-4529-9467-9d524749d946","Type":"ContainerStarted","Data":"354fa0025b94e9a1cddc4b012bb1f16a2436d3aa1b012576dca3f68f340d6f9d"}
Jan 25 00:17:02 crc kubenswrapper[4985]: I0125 00:17:02.004211 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" event={"ID":"1a7b986b-eac4-4529-9467-9d524749d946","Type":"ContainerStarted","Data":"902d9a75e16f842d047a0420a668c5c0b861287e1f09807baa30952f937ba159"}
Jan 25 00:17:02 crc kubenswrapper[4985]: I0125 00:17:02.004227 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" event={"ID":"1a7b986b-eac4-4529-9467-9d524749d946","Type":"ContainerStarted","Data":"f13ff4813bc10232fcbc418ebe612d0f93de45f7676018dd5bc6d49a392572db"}
Jan 25 00:17:02 crc kubenswrapper[4985]: I0125 00:17:02.004240 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" event={"ID":"1a7b986b-eac4-4529-9467-9d524749d946","Type":"ContainerStarted","Data":"2197cfa248e792ec888cf09f50fbbc64f4cde631f7daa3bd678f47d1fb2bdab9"}
Jan 25 00:17:02 crc kubenswrapper[4985]: I0125 00:17:02.004254 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" event={"ID":"1a7b986b-eac4-4529-9467-9d524749d946","Type":"ContainerStarted","Data":"cde55be1447a88fcfe4f20492aab4973da2ec9abb105a67c30a37d7db19d7f41"}
Jan 25 00:17:02 crc kubenswrapper[4985]: I0125 00:17:02.004266 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" event={"ID":"1a7b986b-eac4-4529-9467-9d524749d946","Type":"ContainerStarted","Data":"71b9a0c88bc21817b13836438a9e968fcf6b0574cea2d4eb7bfd591953728a08"}
Jan 25 00:17:05 crc kubenswrapper[4985]: I0125 00:17:05.027795 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" event={"ID":"1a7b986b-eac4-4529-9467-9d524749d946","Type":"ContainerStarted","Data":"16b569c562468714682087cd8ec782b1c591d6a3be51ae5ed7304fe630eba7da"}
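
The long run of "RemoveContainer" / "DeleteContainer returned error" entries above all fail the same way: the CRI runtime answers the status lookup with gRPC code NotFound ("container with ID starting with ... not found: ID does not exist"), i.e. the container is already gone. A minimal Go sketch, assuming a generated CRI client from k8s.io/cri-api, of how a caller can treat that answer as "nothing left to delete" rather than a hard failure; the package name and the removeIfPresent helper are illustrative, only the NotFound handling mirrors the log:

package crihelpers

import (
	"context"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
	runtimeapi "k8s.io/cri-api/pkg/apis/runtime/v1"
)

// removeIfPresent looks the container up first; a gRPC NotFound answer
// (the "could not find container ...: ID does not exist" seen above)
// means it is already gone, so the deletion is treated as a no-op.
func removeIfPresent(ctx context.Context, rt runtimeapi.RuntimeServiceClient, id string) error {
	if _, err := rt.ContainerStatus(ctx, &runtimeapi.ContainerStatusRequest{ContainerId: id}); err != nil {
		if status.Code(err) == codes.NotFound {
			return nil // container already removed by the runtime
		}
		return err
	}
	_, err := rt.RemoveContainer(ctx, &runtimeapi.RemoveContainerRequest{ContainerId: id})
	return err
}
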
Jan 25 00:17:05 crc kubenswrapper[4985]: I0125 00:17:05.836235 4985 patch_prober.go:28] interesting pod/machine-config-daemon-dddxc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 25 00:17:05 crc kubenswrapper[4985]: I0125 00:17:05.836692 4985 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 25 00:17:05 crc kubenswrapper[4985]: I0125 00:17:05.836774 4985 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dddxc"
Jan 25 00:17:05 crc kubenswrapper[4985]: I0125 00:17:05.838081 4985 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6720bc1f53fe4d6d06d4cc9c6ab134d539fb2a6884da52dcd36cde67e4f5afdd"} pod="openshift-machine-config-operator/machine-config-daemon-dddxc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 25 00:17:05 crc kubenswrapper[4985]: I0125 00:17:05.838226 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" containerID="cri-o://6720bc1f53fe4d6d06d4cc9c6ab134d539fb2a6884da52dcd36cde67e4f5afdd" gracePeriod=600
Jan 25 00:17:06 crc kubenswrapper[4985]: I0125 00:17:06.051771 4985 generic.go:334] "Generic (PLEG): container finished" podID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerID="6720bc1f53fe4d6d06d4cc9c6ab134d539fb2a6884da52dcd36cde67e4f5afdd" exitCode=0
Jan 25 00:17:06 crc kubenswrapper[4985]: I0125 00:17:06.051826 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" event={"ID":"5fa83abe-5c61-40a5-bf77-d8f929bdda78","Type":"ContainerDied","Data":"6720bc1f53fe4d6d06d4cc9c6ab134d539fb2a6884da52dcd36cde67e4f5afdd"}
Jan 25 00:17:06 crc kubenswrapper[4985]: I0125 00:17:06.051865 4985 scope.go:117] "RemoveContainer" containerID="da40e82ed6c4f4bb8df94fc89421ac591c8928ddf8db2485dc08f8c949f5f50f"
Jan 25 00:17:07 crc kubenswrapper[4985]: I0125 00:17:07.061195 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" event={"ID":"1a7b986b-eac4-4529-9467-9d524749d946","Type":"ContainerStarted","Data":"ef08e567714747d5cc2d2984c91ef4abc94fb337b41d67e99c20b21f8dc2e166"}
Jan 25 00:17:07 crc kubenswrapper[4985]: I0125 00:17:07.061993 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq"
Jan 25 00:17:07 crc kubenswrapper[4985]: I0125 00:17:07.062019 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq"
Jan 25 00:17:07 crc kubenswrapper[4985]: I0125 00:17:07.062042 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq"
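
The probe entries just above show the full liveness path for machine-config-daemon: the HTTP GET to http://127.0.0.1:8798/health is refused, the prober reports the failure, the sync loop marks the container unhealthy, and the runtime kills it with the pod's grace period (gracePeriod=600) so it can be restarted. A rough sketch of a probe spec that would drive this behaviour, using the corev1 types from k8s.io/api; the period and failure threshold are assumptions, since the log does not record them, and only the path and port come from the output above:

package probeexample

import (
	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/intstr"
)

// livenessProbe mirrors the endpoint probed in the log above; a probe that
// keeps failing is what triggers the "failed liveness probe, will be
// restarted" message and the graceful kill that follow.
var livenessProbe = &corev1.Probe{
	ProbeHandler: corev1.ProbeHandler{ // named Handler in k8s.io/api releases before v0.22
		HTTPGet: &corev1.HTTPGetAction{
			Path: "/health",            // path from the probe output above
			Port: intstr.FromInt(8798), // port from the probe output above
		},
	},
	PeriodSeconds:    10, // assumption: not recorded in the log
	FailureThreshold: 3,  // assumption: not recorded in the log
}
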
Jan 25 00:17:07 crc kubenswrapper[4985]: I0125 00:17:07.065850 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" event={"ID":"5fa83abe-5c61-40a5-bf77-d8f929bdda78","Type":"ContainerStarted","Data":"9754fcac108cedae18ecde93349a9806fb16716055497b233b48c38927bdac01"}
Jan 25 00:17:07 crc kubenswrapper[4985]: I0125 00:17:07.098986 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq"
Jan 25 00:17:07 crc kubenswrapper[4985]: I0125 00:17:07.101283 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq" podStartSLOduration=8.101270246 podStartE2EDuration="8.101270246s" podCreationTimestamp="2026-01-25 00:16:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:17:07.100495615 +0000 UTC m=+637.132431928" watchObservedRunningTime="2026-01-25 00:17:07.101270246 +0000 UTC m=+637.133206519"
Jan 25 00:17:07 crc kubenswrapper[4985]: I0125 00:17:07.106488 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq"
Jan 25 00:17:13 crc kubenswrapper[4985]: I0125 00:17:13.274869 4985 scope.go:117] "RemoveContainer" containerID="70c28b2abf2e70c814a8c5ac83f93a3a6935c31e2d3b6f254ff98456404cb38b"
Jan 25 00:17:13 crc kubenswrapper[4985]: E0125 00:17:13.277543 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-4w9l7_openshift-multus(0294dfed-64df-4d3c-92de-7a93787780a2)\"" pod="openshift-multus/multus-4w9l7" podUID="0294dfed-64df-4d3c-92de-7a93787780a2"
Jan 25 00:17:27 crc kubenswrapper[4985]: I0125 00:17:27.274198 4985 scope.go:117] "RemoveContainer" containerID="70c28b2abf2e70c814a8c5ac83f93a3a6935c31e2d3b6f254ff98456404cb38b"
Jan 25 00:17:28 crc kubenswrapper[4985]: I0125 00:17:28.217998 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-4w9l7_0294dfed-64df-4d3c-92de-7a93787780a2/kube-multus/2.log"
Jan 25 00:17:28 crc kubenswrapper[4985]: I0125 00:17:28.218424 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-4w9l7" event={"ID":"0294dfed-64df-4d3c-92de-7a93787780a2","Type":"ContainerStarted","Data":"effb21f7da5c75dc7ad00e64014dcc8f10e5c96fe61106f1d9ef6a978d98f1e9"}
Jan 25 00:17:29 crc kubenswrapper[4985]: I0125 00:17:29.875959 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-w2bkq"
Jan 25 00:18:16 crc kubenswrapper[4985]: I0125 00:18:16.155962 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wm845"]
Jan 25 00:18:16 crc kubenswrapper[4985]: I0125 00:18:16.156977 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wm845" podUID="9a68aa2a-5b2f-4564-b0ff-967987869b33" containerName="registry-server" containerID="cri-o://7f040f5f1a3648ae30777f435cc7a040c573d3494ad068188d8a61cdedd89fc1" gracePeriod=30
Jan 25 00:18:16 crc kubenswrapper[4985]: I0125 00:18:16.574154 4985 generic.go:334] "Generic (PLEG): container finished" 
podID="9a68aa2a-5b2f-4564-b0ff-967987869b33" containerID="7f040f5f1a3648ae30777f435cc7a040c573d3494ad068188d8a61cdedd89fc1" exitCode=0 Jan 25 00:18:16 crc kubenswrapper[4985]: I0125 00:18:16.574218 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wm845" event={"ID":"9a68aa2a-5b2f-4564-b0ff-967987869b33","Type":"ContainerDied","Data":"7f040f5f1a3648ae30777f435cc7a040c573d3494ad068188d8a61cdedd89fc1"} Jan 25 00:18:16 crc kubenswrapper[4985]: I0125 00:18:16.574604 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wm845" event={"ID":"9a68aa2a-5b2f-4564-b0ff-967987869b33","Type":"ContainerDied","Data":"b4ff5dab6c3cf8242f82510b732dbb08eb2b36009a67d620c35943e6696dba50"} Jan 25 00:18:16 crc kubenswrapper[4985]: I0125 00:18:16.574624 4985 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b4ff5dab6c3cf8242f82510b732dbb08eb2b36009a67d620c35943e6696dba50" Jan 25 00:18:16 crc kubenswrapper[4985]: I0125 00:18:16.604747 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wm845" Jan 25 00:18:16 crc kubenswrapper[4985]: I0125 00:18:16.648138 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a68aa2a-5b2f-4564-b0ff-967987869b33-utilities\") pod \"9a68aa2a-5b2f-4564-b0ff-967987869b33\" (UID: \"9a68aa2a-5b2f-4564-b0ff-967987869b33\") " Jan 25 00:18:16 crc kubenswrapper[4985]: I0125 00:18:16.648212 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crbqg\" (UniqueName: \"kubernetes.io/projected/9a68aa2a-5b2f-4564-b0ff-967987869b33-kube-api-access-crbqg\") pod \"9a68aa2a-5b2f-4564-b0ff-967987869b33\" (UID: \"9a68aa2a-5b2f-4564-b0ff-967987869b33\") " Jan 25 00:18:16 crc kubenswrapper[4985]: I0125 00:18:16.648253 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a68aa2a-5b2f-4564-b0ff-967987869b33-catalog-content\") pod \"9a68aa2a-5b2f-4564-b0ff-967987869b33\" (UID: \"9a68aa2a-5b2f-4564-b0ff-967987869b33\") " Jan 25 00:18:16 crc kubenswrapper[4985]: I0125 00:18:16.649173 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a68aa2a-5b2f-4564-b0ff-967987869b33-utilities" (OuterVolumeSpecName: "utilities") pod "9a68aa2a-5b2f-4564-b0ff-967987869b33" (UID: "9a68aa2a-5b2f-4564-b0ff-967987869b33"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:18:16 crc kubenswrapper[4985]: I0125 00:18:16.655395 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a68aa2a-5b2f-4564-b0ff-967987869b33-kube-api-access-crbqg" (OuterVolumeSpecName: "kube-api-access-crbqg") pod "9a68aa2a-5b2f-4564-b0ff-967987869b33" (UID: "9a68aa2a-5b2f-4564-b0ff-967987869b33"). InnerVolumeSpecName "kube-api-access-crbqg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:18:16 crc kubenswrapper[4985]: I0125 00:18:16.678637 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9a68aa2a-5b2f-4564-b0ff-967987869b33-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9a68aa2a-5b2f-4564-b0ff-967987869b33" (UID: "9a68aa2a-5b2f-4564-b0ff-967987869b33"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:18:16 crc kubenswrapper[4985]: I0125 00:18:16.748848 4985 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9a68aa2a-5b2f-4564-b0ff-967987869b33-utilities\") on node \"crc\" DevicePath \"\"" Jan 25 00:18:16 crc kubenswrapper[4985]: I0125 00:18:16.748903 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crbqg\" (UniqueName: \"kubernetes.io/projected/9a68aa2a-5b2f-4564-b0ff-967987869b33-kube-api-access-crbqg\") on node \"crc\" DevicePath \"\"" Jan 25 00:18:16 crc kubenswrapper[4985]: I0125 00:18:16.748918 4985 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9a68aa2a-5b2f-4564-b0ff-967987869b33-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 25 00:18:17 crc kubenswrapper[4985]: I0125 00:18:17.581279 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wm845" Jan 25 00:18:17 crc kubenswrapper[4985]: I0125 00:18:17.640505 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wm845"] Jan 25 00:18:17 crc kubenswrapper[4985]: I0125 00:18:17.644772 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wm845"] Jan 25 00:18:18 crc kubenswrapper[4985]: I0125 00:18:18.285399 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a68aa2a-5b2f-4564-b0ff-967987869b33" path="/var/lib/kubelet/pods/9a68aa2a-5b2f-4564-b0ff-967987869b33/volumes" Jan 25 00:18:20 crc kubenswrapper[4985]: I0125 00:18:20.227557 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68"] Jan 25 00:18:20 crc kubenswrapper[4985]: E0125 00:18:20.228287 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a68aa2a-5b2f-4564-b0ff-967987869b33" containerName="extract-content" Jan 25 00:18:20 crc kubenswrapper[4985]: I0125 00:18:20.228310 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a68aa2a-5b2f-4564-b0ff-967987869b33" containerName="extract-content" Jan 25 00:18:20 crc kubenswrapper[4985]: E0125 00:18:20.228346 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a68aa2a-5b2f-4564-b0ff-967987869b33" containerName="registry-server" Jan 25 00:18:20 crc kubenswrapper[4985]: I0125 00:18:20.228359 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a68aa2a-5b2f-4564-b0ff-967987869b33" containerName="registry-server" Jan 25 00:18:20 crc kubenswrapper[4985]: E0125 00:18:20.228378 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a68aa2a-5b2f-4564-b0ff-967987869b33" containerName="extract-utilities" Jan 25 00:18:20 crc kubenswrapper[4985]: I0125 00:18:20.228391 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a68aa2a-5b2f-4564-b0ff-967987869b33" containerName="extract-utilities" Jan 25 00:18:20 crc kubenswrapper[4985]: I0125 00:18:20.228577 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a68aa2a-5b2f-4564-b0ff-967987869b33" containerName="registry-server" Jan 25 00:18:20 crc kubenswrapper[4985]: I0125 00:18:20.230062 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68" Jan 25 00:18:20 crc kubenswrapper[4985]: I0125 00:18:20.232321 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 25 00:18:20 crc kubenswrapper[4985]: I0125 00:18:20.244725 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68"] Jan 25 00:18:20 crc kubenswrapper[4985]: I0125 00:18:20.288464 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1e0b8567-c91d-4d4e-a083-470f089b611c-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68\" (UID: \"1e0b8567-c91d-4d4e-a083-470f089b611c\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68" Jan 25 00:18:20 crc kubenswrapper[4985]: I0125 00:18:20.288522 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1e0b8567-c91d-4d4e-a083-470f089b611c-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68\" (UID: \"1e0b8567-c91d-4d4e-a083-470f089b611c\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68" Jan 25 00:18:20 crc kubenswrapper[4985]: I0125 00:18:20.288607 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7lmgt\" (UniqueName: \"kubernetes.io/projected/1e0b8567-c91d-4d4e-a083-470f089b611c-kube-api-access-7lmgt\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68\" (UID: \"1e0b8567-c91d-4d4e-a083-470f089b611c\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68" Jan 25 00:18:20 crc kubenswrapper[4985]: I0125 00:18:20.389652 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1e0b8567-c91d-4d4e-a083-470f089b611c-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68\" (UID: \"1e0b8567-c91d-4d4e-a083-470f089b611c\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68" Jan 25 00:18:20 crc kubenswrapper[4985]: I0125 00:18:20.389760 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1e0b8567-c91d-4d4e-a083-470f089b611c-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68\" (UID: \"1e0b8567-c91d-4d4e-a083-470f089b611c\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68" Jan 25 00:18:20 crc kubenswrapper[4985]: I0125 00:18:20.389827 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7lmgt\" (UniqueName: \"kubernetes.io/projected/1e0b8567-c91d-4d4e-a083-470f089b611c-kube-api-access-7lmgt\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68\" (UID: \"1e0b8567-c91d-4d4e-a083-470f089b611c\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68" Jan 25 00:18:20 crc kubenswrapper[4985]: I0125 00:18:20.390602 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/1e0b8567-c91d-4d4e-a083-470f089b611c-bundle\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68\" (UID: \"1e0b8567-c91d-4d4e-a083-470f089b611c\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68" Jan 25 00:18:20 crc kubenswrapper[4985]: I0125 00:18:20.390639 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1e0b8567-c91d-4d4e-a083-470f089b611c-util\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68\" (UID: \"1e0b8567-c91d-4d4e-a083-470f089b611c\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68" Jan 25 00:18:20 crc kubenswrapper[4985]: I0125 00:18:20.410397 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7lmgt\" (UniqueName: \"kubernetes.io/projected/1e0b8567-c91d-4d4e-a083-470f089b611c-kube-api-access-7lmgt\") pod \"98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68\" (UID: \"1e0b8567-c91d-4d4e-a083-470f089b611c\") " pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68" Jan 25 00:18:20 crc kubenswrapper[4985]: I0125 00:18:20.559761 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68" Jan 25 00:18:20 crc kubenswrapper[4985]: I0125 00:18:20.864977 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68"] Jan 25 00:18:21 crc kubenswrapper[4985]: I0125 00:18:21.605187 4985 generic.go:334] "Generic (PLEG): container finished" podID="1e0b8567-c91d-4d4e-a083-470f089b611c" containerID="916437e3058ba4519dc799e03d80553677247836c10cb1554da7c6548259e699" exitCode=0 Jan 25 00:18:21 crc kubenswrapper[4985]: I0125 00:18:21.605289 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68" event={"ID":"1e0b8567-c91d-4d4e-a083-470f089b611c","Type":"ContainerDied","Data":"916437e3058ba4519dc799e03d80553677247836c10cb1554da7c6548259e699"} Jan 25 00:18:21 crc kubenswrapper[4985]: I0125 00:18:21.606445 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68" event={"ID":"1e0b8567-c91d-4d4e-a083-470f089b611c","Type":"ContainerStarted","Data":"69dfce17a5b22806918f282981689441504f7b7fb756f45920d08771819bf91f"} Jan 25 00:18:21 crc kubenswrapper[4985]: I0125 00:18:21.607197 4985 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 25 00:18:23 crc kubenswrapper[4985]: I0125 00:18:23.617050 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68" event={"ID":"1e0b8567-c91d-4d4e-a083-470f089b611c","Type":"ContainerStarted","Data":"2f351b37c9c820a2c2413aafc65407d70cbacc093976359a596a211770fa1843"} Jan 25 00:18:24 crc kubenswrapper[4985]: I0125 00:18:24.627288 4985 generic.go:334] "Generic (PLEG): container finished" podID="1e0b8567-c91d-4d4e-a083-470f089b611c" containerID="2f351b37c9c820a2c2413aafc65407d70cbacc093976359a596a211770fa1843" exitCode=0 Jan 25 00:18:24 crc kubenswrapper[4985]: I0125 00:18:24.627357 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68" event={"ID":"1e0b8567-c91d-4d4e-a083-470f089b611c","Type":"ContainerDied","Data":"2f351b37c9c820a2c2413aafc65407d70cbacc093976359a596a211770fa1843"} Jan 25 00:18:25 crc kubenswrapper[4985]: I0125 00:18:25.648687 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68" event={"ID":"1e0b8567-c91d-4d4e-a083-470f089b611c","Type":"ContainerStarted","Data":"89ff45415da55aa18eb7fa5de077f474f07a7a0d327027c40e9088d99a049025"} Jan 25 00:18:25 crc kubenswrapper[4985]: I0125 00:18:25.676077 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68" podStartSLOduration=4.361507602 podStartE2EDuration="5.676052498s" podCreationTimestamp="2026-01-25 00:18:20 +0000 UTC" firstStartedPulling="2026-01-25 00:18:21.6069323 +0000 UTC m=+711.638868583" lastFinishedPulling="2026-01-25 00:18:22.921477206 +0000 UTC m=+712.953413479" observedRunningTime="2026-01-25 00:18:25.6689985 +0000 UTC m=+715.700934773" watchObservedRunningTime="2026-01-25 00:18:25.676052498 +0000 UTC m=+715.707988781" Jan 25 00:18:26 crc kubenswrapper[4985]: I0125 00:18:26.657966 4985 generic.go:334] "Generic (PLEG): container finished" podID="1e0b8567-c91d-4d4e-a083-470f089b611c" containerID="89ff45415da55aa18eb7fa5de077f474f07a7a0d327027c40e9088d99a049025" exitCode=0 Jan 25 00:18:26 crc kubenswrapper[4985]: I0125 00:18:26.658051 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68" event={"ID":"1e0b8567-c91d-4d4e-a083-470f089b611c","Type":"ContainerDied","Data":"89ff45415da55aa18eb7fa5de077f474f07a7a0d327027c40e9088d99a049025"} Jan 25 00:18:28 crc kubenswrapper[4985]: I0125 00:18:28.018310 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68" Jan 25 00:18:28 crc kubenswrapper[4985]: I0125 00:18:28.023697 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1e0b8567-c91d-4d4e-a083-470f089b611c-util\") pod \"1e0b8567-c91d-4d4e-a083-470f089b611c\" (UID: \"1e0b8567-c91d-4d4e-a083-470f089b611c\") " Jan 25 00:18:28 crc kubenswrapper[4985]: I0125 00:18:28.023793 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1e0b8567-c91d-4d4e-a083-470f089b611c-bundle\") pod \"1e0b8567-c91d-4d4e-a083-470f089b611c\" (UID: \"1e0b8567-c91d-4d4e-a083-470f089b611c\") " Jan 25 00:18:28 crc kubenswrapper[4985]: I0125 00:18:28.023836 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7lmgt\" (UniqueName: \"kubernetes.io/projected/1e0b8567-c91d-4d4e-a083-470f089b611c-kube-api-access-7lmgt\") pod \"1e0b8567-c91d-4d4e-a083-470f089b611c\" (UID: \"1e0b8567-c91d-4d4e-a083-470f089b611c\") " Jan 25 00:18:28 crc kubenswrapper[4985]: I0125 00:18:28.028518 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e0b8567-c91d-4d4e-a083-470f089b611c-bundle" (OuterVolumeSpecName: "bundle") pod "1e0b8567-c91d-4d4e-a083-470f089b611c" (UID: "1e0b8567-c91d-4d4e-a083-470f089b611c"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:18:28 crc kubenswrapper[4985]: I0125 00:18:28.035817 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1e0b8567-c91d-4d4e-a083-470f089b611c-kube-api-access-7lmgt" (OuterVolumeSpecName: "kube-api-access-7lmgt") pod "1e0b8567-c91d-4d4e-a083-470f089b611c" (UID: "1e0b8567-c91d-4d4e-a083-470f089b611c"). InnerVolumeSpecName "kube-api-access-7lmgt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:18:28 crc kubenswrapper[4985]: I0125 00:18:28.052956 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1e0b8567-c91d-4d4e-a083-470f089b611c-util" (OuterVolumeSpecName: "util") pod "1e0b8567-c91d-4d4e-a083-470f089b611c" (UID: "1e0b8567-c91d-4d4e-a083-470f089b611c"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:18:28 crc kubenswrapper[4985]: I0125 00:18:28.124917 4985 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/1e0b8567-c91d-4d4e-a083-470f089b611c-util\") on node \"crc\" DevicePath \"\"" Jan 25 00:18:28 crc kubenswrapper[4985]: I0125 00:18:28.125122 4985 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/1e0b8567-c91d-4d4e-a083-470f089b611c-bundle\") on node \"crc\" DevicePath \"\"" Jan 25 00:18:28 crc kubenswrapper[4985]: I0125 00:18:28.125178 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7lmgt\" (UniqueName: \"kubernetes.io/projected/1e0b8567-c91d-4d4e-a083-470f089b611c-kube-api-access-7lmgt\") on node \"crc\" DevicePath \"\"" Jan 25 00:18:28 crc kubenswrapper[4985]: I0125 00:18:28.673875 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68" event={"ID":"1e0b8567-c91d-4d4e-a083-470f089b611c","Type":"ContainerDied","Data":"69dfce17a5b22806918f282981689441504f7b7fb756f45920d08771819bf91f"} Jan 25 00:18:28 crc kubenswrapper[4985]: I0125 00:18:28.673952 4985 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="69dfce17a5b22806918f282981689441504f7b7fb756f45920d08771819bf91f" Jan 25 00:18:28 crc kubenswrapper[4985]: I0125 00:18:28.674544 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68" Jan 25 00:18:29 crc kubenswrapper[4985]: I0125 00:18:29.203508 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs"] Jan 25 00:18:29 crc kubenswrapper[4985]: E0125 00:18:29.204012 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e0b8567-c91d-4d4e-a083-470f089b611c" containerName="util" Jan 25 00:18:29 crc kubenswrapper[4985]: I0125 00:18:29.204052 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e0b8567-c91d-4d4e-a083-470f089b611c" containerName="util" Jan 25 00:18:29 crc kubenswrapper[4985]: E0125 00:18:29.204078 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e0b8567-c91d-4d4e-a083-470f089b611c" containerName="pull" Jan 25 00:18:29 crc kubenswrapper[4985]: I0125 00:18:29.204093 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e0b8567-c91d-4d4e-a083-470f089b611c" containerName="pull" Jan 25 00:18:29 crc kubenswrapper[4985]: E0125 00:18:29.204157 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1e0b8567-c91d-4d4e-a083-470f089b611c" containerName="extract" Jan 25 00:18:29 crc kubenswrapper[4985]: I0125 00:18:29.204175 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="1e0b8567-c91d-4d4e-a083-470f089b611c" containerName="extract" Jan 25 00:18:29 crc kubenswrapper[4985]: I0125 00:18:29.204439 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="1e0b8567-c91d-4d4e-a083-470f089b611c" containerName="extract" Jan 25 00:18:29 crc kubenswrapper[4985]: I0125 00:18:29.205996 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs" Jan 25 00:18:29 crc kubenswrapper[4985]: I0125 00:18:29.209178 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 25 00:18:29 crc kubenswrapper[4985]: I0125 00:18:29.215079 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs"] Jan 25 00:18:29 crc kubenswrapper[4985]: I0125 00:18:29.239787 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/04fd0691-1cb5-4e06-80e8-cb251c8cf4d3-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs\" (UID: \"04fd0691-1cb5-4e06-80e8-cb251c8cf4d3\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs" Jan 25 00:18:29 crc kubenswrapper[4985]: I0125 00:18:29.239917 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/04fd0691-1cb5-4e06-80e8-cb251c8cf4d3-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs\" (UID: \"04fd0691-1cb5-4e06-80e8-cb251c8cf4d3\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs" Jan 25 00:18:29 crc kubenswrapper[4985]: I0125 00:18:29.240038 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwrtk\" (UniqueName: \"kubernetes.io/projected/04fd0691-1cb5-4e06-80e8-cb251c8cf4d3-kube-api-access-xwrtk\") pod 
\"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs\" (UID: \"04fd0691-1cb5-4e06-80e8-cb251c8cf4d3\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs" Jan 25 00:18:29 crc kubenswrapper[4985]: I0125 00:18:29.340714 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/04fd0691-1cb5-4e06-80e8-cb251c8cf4d3-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs\" (UID: \"04fd0691-1cb5-4e06-80e8-cb251c8cf4d3\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs" Jan 25 00:18:29 crc kubenswrapper[4985]: I0125 00:18:29.341394 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwrtk\" (UniqueName: \"kubernetes.io/projected/04fd0691-1cb5-4e06-80e8-cb251c8cf4d3-kube-api-access-xwrtk\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs\" (UID: \"04fd0691-1cb5-4e06-80e8-cb251c8cf4d3\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs" Jan 25 00:18:29 crc kubenswrapper[4985]: I0125 00:18:29.341260 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/04fd0691-1cb5-4e06-80e8-cb251c8cf4d3-util\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs\" (UID: \"04fd0691-1cb5-4e06-80e8-cb251c8cf4d3\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs" Jan 25 00:18:29 crc kubenswrapper[4985]: I0125 00:18:29.341653 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/04fd0691-1cb5-4e06-80e8-cb251c8cf4d3-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs\" (UID: \"04fd0691-1cb5-4e06-80e8-cb251c8cf4d3\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs" Jan 25 00:18:29 crc kubenswrapper[4985]: I0125 00:18:29.341916 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/04fd0691-1cb5-4e06-80e8-cb251c8cf4d3-bundle\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs\" (UID: \"04fd0691-1cb5-4e06-80e8-cb251c8cf4d3\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs" Jan 25 00:18:29 crc kubenswrapper[4985]: I0125 00:18:29.363075 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwrtk\" (UniqueName: \"kubernetes.io/projected/04fd0691-1cb5-4e06-80e8-cb251c8cf4d3-kube-api-access-xwrtk\") pod \"8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs\" (UID: \"04fd0691-1cb5-4e06-80e8-cb251c8cf4d3\") " pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs" Jan 25 00:18:29 crc kubenswrapper[4985]: I0125 00:18:29.529075 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs" Jan 25 00:18:29 crc kubenswrapper[4985]: I0125 00:18:29.709059 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs"] Jan 25 00:18:29 crc kubenswrapper[4985]: W0125 00:18:29.715429 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod04fd0691_1cb5_4e06_80e8_cb251c8cf4d3.slice/crio-3da3d40b21218e9c0f26f75e77a2e2af4ef2d8009e06787d2d6d03351b2d4e38 WatchSource:0}: Error finding container 3da3d40b21218e9c0f26f75e77a2e2af4ef2d8009e06787d2d6d03351b2d4e38: Status 404 returned error can't find the container with id 3da3d40b21218e9c0f26f75e77a2e2af4ef2d8009e06787d2d6d03351b2d4e38 Jan 25 00:18:30 crc kubenswrapper[4985]: I0125 00:18:30.203861 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5"] Jan 25 00:18:30 crc kubenswrapper[4985]: I0125 00:18:30.206932 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5" Jan 25 00:18:30 crc kubenswrapper[4985]: I0125 00:18:30.214433 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5"] Jan 25 00:18:30 crc kubenswrapper[4985]: I0125 00:18:30.254497 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a9856c83-fe7c-4653-8d19-114a5b040a73-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5\" (UID: \"a9856c83-fe7c-4653-8d19-114a5b040a73\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5" Jan 25 00:18:30 crc kubenswrapper[4985]: I0125 00:18:30.254590 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a9856c83-fe7c-4653-8d19-114a5b040a73-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5\" (UID: \"a9856c83-fe7c-4653-8d19-114a5b040a73\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5" Jan 25 00:18:30 crc kubenswrapper[4985]: I0125 00:18:30.254657 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pg8b7\" (UniqueName: \"kubernetes.io/projected/a9856c83-fe7c-4653-8d19-114a5b040a73-kube-api-access-pg8b7\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5\" (UID: \"a9856c83-fe7c-4653-8d19-114a5b040a73\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5" Jan 25 00:18:30 crc kubenswrapper[4985]: I0125 00:18:30.355297 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a9856c83-fe7c-4653-8d19-114a5b040a73-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5\" (UID: \"a9856c83-fe7c-4653-8d19-114a5b040a73\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5" Jan 25 00:18:30 crc kubenswrapper[4985]: I0125 00:18:30.355457 4985 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"kube-api-access-pg8b7\" (UniqueName: \"kubernetes.io/projected/a9856c83-fe7c-4653-8d19-114a5b040a73-kube-api-access-pg8b7\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5\" (UID: \"a9856c83-fe7c-4653-8d19-114a5b040a73\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5" Jan 25 00:18:30 crc kubenswrapper[4985]: I0125 00:18:30.355521 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a9856c83-fe7c-4653-8d19-114a5b040a73-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5\" (UID: \"a9856c83-fe7c-4653-8d19-114a5b040a73\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5" Jan 25 00:18:30 crc kubenswrapper[4985]: I0125 00:18:30.356147 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a9856c83-fe7c-4653-8d19-114a5b040a73-util\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5\" (UID: \"a9856c83-fe7c-4653-8d19-114a5b040a73\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5" Jan 25 00:18:30 crc kubenswrapper[4985]: I0125 00:18:30.356510 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a9856c83-fe7c-4653-8d19-114a5b040a73-bundle\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5\" (UID: \"a9856c83-fe7c-4653-8d19-114a5b040a73\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5" Jan 25 00:18:30 crc kubenswrapper[4985]: I0125 00:18:30.375629 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pg8b7\" (UniqueName: \"kubernetes.io/projected/a9856c83-fe7c-4653-8d19-114a5b040a73-kube-api-access-pg8b7\") pod \"1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5\" (UID: \"a9856c83-fe7c-4653-8d19-114a5b040a73\") " pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5" Jan 25 00:18:30 crc kubenswrapper[4985]: I0125 00:18:30.568198 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5" Jan 25 00:18:30 crc kubenswrapper[4985]: I0125 00:18:30.663815 4985 scope.go:117] "RemoveContainer" containerID="b8b61bf587ca504d77fed49e2251515a24afcf8399fde7459a1f30020dcb711b" Jan 25 00:18:30 crc kubenswrapper[4985]: I0125 00:18:30.695383 4985 generic.go:334] "Generic (PLEG): container finished" podID="04fd0691-1cb5-4e06-80e8-cb251c8cf4d3" containerID="f041f659b18c4f18113a14335878fb82b02652d32d5431cc578926e1a4b5e00b" exitCode=0 Jan 25 00:18:30 crc kubenswrapper[4985]: I0125 00:18:30.695430 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs" event={"ID":"04fd0691-1cb5-4e06-80e8-cb251c8cf4d3","Type":"ContainerDied","Data":"f041f659b18c4f18113a14335878fb82b02652d32d5431cc578926e1a4b5e00b"} Jan 25 00:18:30 crc kubenswrapper[4985]: I0125 00:18:30.695459 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs" event={"ID":"04fd0691-1cb5-4e06-80e8-cb251c8cf4d3","Type":"ContainerStarted","Data":"3da3d40b21218e9c0f26f75e77a2e2af4ef2d8009e06787d2d6d03351b2d4e38"} Jan 25 00:18:30 crc kubenswrapper[4985]: I0125 00:18:30.696433 4985 scope.go:117] "RemoveContainer" containerID="0d2c13cb9625b31bbe6f1be3eb4b5edd5eeecf50d921350ad1086a6c90b26de4" Jan 25 00:18:30 crc kubenswrapper[4985]: I0125 00:18:30.733315 4985 scope.go:117] "RemoveContainer" containerID="7f040f5f1a3648ae30777f435cc7a040c573d3494ad068188d8a61cdedd89fc1" Jan 25 00:18:31 crc kubenswrapper[4985]: I0125 00:18:31.021392 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5"] Jan 25 00:18:31 crc kubenswrapper[4985]: W0125 00:18:31.040731 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda9856c83_fe7c_4653_8d19_114a5b040a73.slice/crio-da30c24409c28c71e9b381c20c65ded482eaa9ebf51af07e5801fe7c1e739f47 WatchSource:0}: Error finding container da30c24409c28c71e9b381c20c65ded482eaa9ebf51af07e5801fe7c1e739f47: Status 404 returned error can't find the container with id da30c24409c28c71e9b381c20c65ded482eaa9ebf51af07e5801fe7c1e739f47 Jan 25 00:18:31 crc kubenswrapper[4985]: I0125 00:18:31.702618 4985 generic.go:334] "Generic (PLEG): container finished" podID="a9856c83-fe7c-4653-8d19-114a5b040a73" containerID="caac17ec7996d5628953b69e78360cb823522e66fe46e9ac3bafda778dca5a45" exitCode=0 Jan 25 00:18:31 crc kubenswrapper[4985]: I0125 00:18:31.702821 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5" event={"ID":"a9856c83-fe7c-4653-8d19-114a5b040a73","Type":"ContainerDied","Data":"caac17ec7996d5628953b69e78360cb823522e66fe46e9ac3bafda778dca5a45"} Jan 25 00:18:31 crc kubenswrapper[4985]: I0125 00:18:31.702976 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5" event={"ID":"a9856c83-fe7c-4653-8d19-114a5b040a73","Type":"ContainerStarted","Data":"da30c24409c28c71e9b381c20c65ded482eaa9ebf51af07e5801fe7c1e739f47"} Jan 25 00:18:31 crc kubenswrapper[4985]: I0125 00:18:31.707325 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs" event={"ID":"04fd0691-1cb5-4e06-80e8-cb251c8cf4d3","Type":"ContainerStarted","Data":"60b863e73084ad92a066bef135b1072e1c443a482afda820582e9cec72f2e3fd"} Jan 25 00:18:32 crc kubenswrapper[4985]: I0125 00:18:32.719131 4985 generic.go:334] "Generic (PLEG): container finished" podID="04fd0691-1cb5-4e06-80e8-cb251c8cf4d3" containerID="60b863e73084ad92a066bef135b1072e1c443a482afda820582e9cec72f2e3fd" exitCode=0 Jan 25 00:18:32 crc kubenswrapper[4985]: I0125 00:18:32.719184 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs" event={"ID":"04fd0691-1cb5-4e06-80e8-cb251c8cf4d3","Type":"ContainerDied","Data":"60b863e73084ad92a066bef135b1072e1c443a482afda820582e9cec72f2e3fd"} Jan 25 00:18:33 crc kubenswrapper[4985]: I0125 00:18:33.741323 4985 generic.go:334] "Generic (PLEG): container finished" podID="04fd0691-1cb5-4e06-80e8-cb251c8cf4d3" containerID="7bd27f7f4d63761066dab6aaf5f776fbc81f44a5d54f6476859d66056aa79a02" exitCode=0 Jan 25 00:18:33 crc kubenswrapper[4985]: I0125 00:18:33.741375 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs" event={"ID":"04fd0691-1cb5-4e06-80e8-cb251c8cf4d3","Type":"ContainerDied","Data":"7bd27f7f4d63761066dab6aaf5f776fbc81f44a5d54f6476859d66056aa79a02"} Jan 25 00:18:37 crc kubenswrapper[4985]: I0125 00:18:37.495815 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs" Jan 25 00:18:37 crc kubenswrapper[4985]: I0125 00:18:37.701035 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/04fd0691-1cb5-4e06-80e8-cb251c8cf4d3-bundle\") pod \"04fd0691-1cb5-4e06-80e8-cb251c8cf4d3\" (UID: \"04fd0691-1cb5-4e06-80e8-cb251c8cf4d3\") " Jan 25 00:18:37 crc kubenswrapper[4985]: I0125 00:18:37.701128 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwrtk\" (UniqueName: \"kubernetes.io/projected/04fd0691-1cb5-4e06-80e8-cb251c8cf4d3-kube-api-access-xwrtk\") pod \"04fd0691-1cb5-4e06-80e8-cb251c8cf4d3\" (UID: \"04fd0691-1cb5-4e06-80e8-cb251c8cf4d3\") " Jan 25 00:18:37 crc kubenswrapper[4985]: I0125 00:18:37.701181 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/04fd0691-1cb5-4e06-80e8-cb251c8cf4d3-util\") pod \"04fd0691-1cb5-4e06-80e8-cb251c8cf4d3\" (UID: \"04fd0691-1cb5-4e06-80e8-cb251c8cf4d3\") " Jan 25 00:18:37 crc kubenswrapper[4985]: I0125 00:18:37.702257 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04fd0691-1cb5-4e06-80e8-cb251c8cf4d3-bundle" (OuterVolumeSpecName: "bundle") pod "04fd0691-1cb5-4e06-80e8-cb251c8cf4d3" (UID: "04fd0691-1cb5-4e06-80e8-cb251c8cf4d3"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:18:37 crc kubenswrapper[4985]: I0125 00:18:37.709687 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/04fd0691-1cb5-4e06-80e8-cb251c8cf4d3-kube-api-access-xwrtk" (OuterVolumeSpecName: "kube-api-access-xwrtk") pod "04fd0691-1cb5-4e06-80e8-cb251c8cf4d3" (UID: "04fd0691-1cb5-4e06-80e8-cb251c8cf4d3"). 
InnerVolumeSpecName "kube-api-access-xwrtk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:18:37 crc kubenswrapper[4985]: I0125 00:18:37.712972 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/04fd0691-1cb5-4e06-80e8-cb251c8cf4d3-util" (OuterVolumeSpecName: "util") pod "04fd0691-1cb5-4e06-80e8-cb251c8cf4d3" (UID: "04fd0691-1cb5-4e06-80e8-cb251c8cf4d3"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:18:37 crc kubenswrapper[4985]: I0125 00:18:37.785873 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs" event={"ID":"04fd0691-1cb5-4e06-80e8-cb251c8cf4d3","Type":"ContainerDied","Data":"3da3d40b21218e9c0f26f75e77a2e2af4ef2d8009e06787d2d6d03351b2d4e38"} Jan 25 00:18:37 crc kubenswrapper[4985]: I0125 00:18:37.786262 4985 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3da3d40b21218e9c0f26f75e77a2e2af4ef2d8009e06787d2d6d03351b2d4e38" Jan 25 00:18:37 crc kubenswrapper[4985]: I0125 00:18:37.786172 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs" Jan 25 00:18:37 crc kubenswrapper[4985]: I0125 00:18:37.787888 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5" event={"ID":"a9856c83-fe7c-4653-8d19-114a5b040a73","Type":"ContainerStarted","Data":"579164d8f0551e560bb7e35923903b3b2163a3b80808fe98a7225ab0da72cc2b"} Jan 25 00:18:37 crc kubenswrapper[4985]: I0125 00:18:37.802221 4985 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/04fd0691-1cb5-4e06-80e8-cb251c8cf4d3-bundle\") on node \"crc\" DevicePath \"\"" Jan 25 00:18:37 crc kubenswrapper[4985]: I0125 00:18:37.802439 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwrtk\" (UniqueName: \"kubernetes.io/projected/04fd0691-1cb5-4e06-80e8-cb251c8cf4d3-kube-api-access-xwrtk\") on node \"crc\" DevicePath \"\"" Jan 25 00:18:37 crc kubenswrapper[4985]: I0125 00:18:37.802506 4985 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/04fd0691-1cb5-4e06-80e8-cb251c8cf4d3-util\") on node \"crc\" DevicePath \"\"" Jan 25 00:18:38 crc kubenswrapper[4985]: I0125 00:18:38.795609 4985 generic.go:334] "Generic (PLEG): container finished" podID="a9856c83-fe7c-4653-8d19-114a5b040a73" containerID="579164d8f0551e560bb7e35923903b3b2163a3b80808fe98a7225ab0da72cc2b" exitCode=0 Jan 25 00:18:38 crc kubenswrapper[4985]: I0125 00:18:38.795702 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5" event={"ID":"a9856c83-fe7c-4653-8d19-114a5b040a73","Type":"ContainerDied","Data":"579164d8f0551e560bb7e35923903b3b2163a3b80808fe98a7225ab0da72cc2b"} Jan 25 00:18:39 crc kubenswrapper[4985]: I0125 00:18:39.802795 4985 generic.go:334] "Generic (PLEG): container finished" podID="a9856c83-fe7c-4653-8d19-114a5b040a73" containerID="022262d1baa8273090cb7113f02a79963a2ee41ce9e07fd516068e444bcd3ad3" exitCode=0 Jan 25 00:18:39 crc kubenswrapper[4985]: I0125 00:18:39.802915 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5" 
event={"ID":"a9856c83-fe7c-4653-8d19-114a5b040a73","Type":"ContainerDied","Data":"022262d1baa8273090cb7113f02a79963a2ee41ce9e07fd516068e444bcd3ad3"} Jan 25 00:18:39 crc kubenswrapper[4985]: I0125 00:18:39.855422 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-85xxr"] Jan 25 00:18:39 crc kubenswrapper[4985]: E0125 00:18:39.855641 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04fd0691-1cb5-4e06-80e8-cb251c8cf4d3" containerName="extract" Jan 25 00:18:39 crc kubenswrapper[4985]: I0125 00:18:39.855655 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="04fd0691-1cb5-4e06-80e8-cb251c8cf4d3" containerName="extract" Jan 25 00:18:39 crc kubenswrapper[4985]: E0125 00:18:39.855672 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04fd0691-1cb5-4e06-80e8-cb251c8cf4d3" containerName="pull" Jan 25 00:18:39 crc kubenswrapper[4985]: I0125 00:18:39.855679 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="04fd0691-1cb5-4e06-80e8-cb251c8cf4d3" containerName="pull" Jan 25 00:18:39 crc kubenswrapper[4985]: E0125 00:18:39.855697 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="04fd0691-1cb5-4e06-80e8-cb251c8cf4d3" containerName="util" Jan 25 00:18:39 crc kubenswrapper[4985]: I0125 00:18:39.855705 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="04fd0691-1cb5-4e06-80e8-cb251c8cf4d3" containerName="util" Jan 25 00:18:39 crc kubenswrapper[4985]: I0125 00:18:39.855807 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="04fd0691-1cb5-4e06-80e8-cb251c8cf4d3" containerName="extract" Jan 25 00:18:39 crc kubenswrapper[4985]: I0125 00:18:39.856219 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-85xxr" Jan 25 00:18:39 crc kubenswrapper[4985]: I0125 00:18:39.858835 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"openshift-service-ca.crt" Jan 25 00:18:39 crc kubenswrapper[4985]: I0125 00:18:39.859117 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operators"/"kube-root-ca.crt" Jan 25 00:18:39 crc kubenswrapper[4985]: I0125 00:18:39.859890 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-dockercfg-j472x" Jan 25 00:18:39 crc kubenswrapper[4985]: I0125 00:18:39.871527 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-85xxr"] Jan 25 00:18:39 crc kubenswrapper[4985]: I0125 00:18:39.928227 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfdn7\" (UniqueName: \"kubernetes.io/projected/16badb6b-d0b6-454e-a544-6811966984a6-kube-api-access-lfdn7\") pod \"obo-prometheus-operator-68bc856cb9-85xxr\" (UID: \"16badb6b-d0b6-454e-a544-6811966984a6\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-85xxr" Jan 25 00:18:39 crc kubenswrapper[4985]: I0125 00:18:39.977023 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-xnqrt"] Jan 25 00:18:39 crc kubenswrapper[4985]: I0125 00:18:39.977630 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-xnqrt" Jan 25 00:18:39 crc kubenswrapper[4985]: I0125 00:18:39.979905 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-dockercfg-5ml25" Jan 25 00:18:39 crc kubenswrapper[4985]: I0125 00:18:39.980161 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"obo-prometheus-operator-admission-webhook-service-cert" Jan 25 00:18:39 crc kubenswrapper[4985]: I0125 00:18:39.988624 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-q6cxx"] Jan 25 00:18:39 crc kubenswrapper[4985]: I0125 00:18:39.989362 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-q6cxx" Jan 25 00:18:39 crc kubenswrapper[4985]: I0125 00:18:39.996474 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-xnqrt"] Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.030033 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-q6cxx"] Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.030294 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfdn7\" (UniqueName: \"kubernetes.io/projected/16badb6b-d0b6-454e-a544-6811966984a6-kube-api-access-lfdn7\") pod \"obo-prometheus-operator-68bc856cb9-85xxr\" (UID: \"16badb6b-d0b6-454e-a544-6811966984a6\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-85xxr" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.062782 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfdn7\" (UniqueName: \"kubernetes.io/projected/16badb6b-d0b6-454e-a544-6811966984a6-kube-api-access-lfdn7\") pod \"obo-prometheus-operator-68bc856cb9-85xxr\" (UID: \"16badb6b-d0b6-454e-a544-6811966984a6\") " pod="openshift-operators/obo-prometheus-operator-68bc856cb9-85xxr" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.131437 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b1769099-2ab1-4b9b-b373-dc9e096c14d9-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-954995c76-xnqrt\" (UID: \"b1769099-2ab1-4b9b-b373-dc9e096c14d9\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-xnqrt" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.131495 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b1769099-2ab1-4b9b-b373-dc9e096c14d9-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-954995c76-xnqrt\" (UID: \"b1769099-2ab1-4b9b-b373-dc9e096c14d9\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-xnqrt" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.131521 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ba5b943c-3070-4afc-b57c-fb0be23f2a32-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-954995c76-q6cxx\" (UID: \"ba5b943c-3070-4afc-b57c-fb0be23f2a32\") " 
pod="openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-q6cxx" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.131576 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ba5b943c-3070-4afc-b57c-fb0be23f2a32-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-954995c76-q6cxx\" (UID: \"ba5b943c-3070-4afc-b57c-fb0be23f2a32\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-q6cxx" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.172311 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-t7dvp"] Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.173155 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-t7dvp" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.176338 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-sa-dockercfg-b8hgq" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.177305 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"observability-operator-tls" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.181297 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-85xxr" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.185385 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-t7dvp"] Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.236058 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ba5b943c-3070-4afc-b57c-fb0be23f2a32-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-954995c76-q6cxx\" (UID: \"ba5b943c-3070-4afc-b57c-fb0be23f2a32\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-q6cxx" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.236142 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/ded8ba3b-7925-430a-a595-93b3b00ae9b5-observability-operator-tls\") pod \"observability-operator-59bdc8b94-t7dvp\" (UID: \"ded8ba3b-7925-430a-a595-93b3b00ae9b5\") " pod="openshift-operators/observability-operator-59bdc8b94-t7dvp" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.236180 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b1769099-2ab1-4b9b-b373-dc9e096c14d9-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-954995c76-xnqrt\" (UID: \"b1769099-2ab1-4b9b-b373-dc9e096c14d9\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-xnqrt" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.236214 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b1769099-2ab1-4b9b-b373-dc9e096c14d9-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-954995c76-xnqrt\" (UID: \"b1769099-2ab1-4b9b-b373-dc9e096c14d9\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-xnqrt" Jan 25 00:18:40 crc 
kubenswrapper[4985]: I0125 00:18:40.236249 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2m8z\" (UniqueName: \"kubernetes.io/projected/ded8ba3b-7925-430a-a595-93b3b00ae9b5-kube-api-access-z2m8z\") pod \"observability-operator-59bdc8b94-t7dvp\" (UID: \"ded8ba3b-7925-430a-a595-93b3b00ae9b5\") " pod="openshift-operators/observability-operator-59bdc8b94-t7dvp" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.236273 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ba5b943c-3070-4afc-b57c-fb0be23f2a32-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-954995c76-q6cxx\" (UID: \"ba5b943c-3070-4afc-b57c-fb0be23f2a32\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-q6cxx" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.243313 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ba5b943c-3070-4afc-b57c-fb0be23f2a32-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-954995c76-q6cxx\" (UID: \"ba5b943c-3070-4afc-b57c-fb0be23f2a32\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-q6cxx" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.247526 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/b1769099-2ab1-4b9b-b373-dc9e096c14d9-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-954995c76-xnqrt\" (UID: \"b1769099-2ab1-4b9b-b373-dc9e096c14d9\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-xnqrt" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.247526 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/ba5b943c-3070-4afc-b57c-fb0be23f2a32-apiservice-cert\") pod \"obo-prometheus-operator-admission-webhook-954995c76-q6cxx\" (UID: \"ba5b943c-3070-4afc-b57c-fb0be23f2a32\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-q6cxx" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.247691 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/b1769099-2ab1-4b9b-b373-dc9e096c14d9-webhook-cert\") pod \"obo-prometheus-operator-admission-webhook-954995c76-xnqrt\" (UID: \"b1769099-2ab1-4b9b-b373-dc9e096c14d9\") " pod="openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-xnqrt" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.291499 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-xnqrt" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.304350 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-q6cxx" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.337339 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2m8z\" (UniqueName: \"kubernetes.io/projected/ded8ba3b-7925-430a-a595-93b3b00ae9b5-kube-api-access-z2m8z\") pod \"observability-operator-59bdc8b94-t7dvp\" (UID: \"ded8ba3b-7925-430a-a595-93b3b00ae9b5\") " pod="openshift-operators/observability-operator-59bdc8b94-t7dvp" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.337420 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/ded8ba3b-7925-430a-a595-93b3b00ae9b5-observability-operator-tls\") pod \"observability-operator-59bdc8b94-t7dvp\" (UID: \"ded8ba3b-7925-430a-a595-93b3b00ae9b5\") " pod="openshift-operators/observability-operator-59bdc8b94-t7dvp" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.340680 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"observability-operator-tls\" (UniqueName: \"kubernetes.io/secret/ded8ba3b-7925-430a-a595-93b3b00ae9b5-observability-operator-tls\") pod \"observability-operator-59bdc8b94-t7dvp\" (UID: \"ded8ba3b-7925-430a-a595-93b3b00ae9b5\") " pod="openshift-operators/observability-operator-59bdc8b94-t7dvp" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.358503 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z2m8z\" (UniqueName: \"kubernetes.io/projected/ded8ba3b-7925-430a-a595-93b3b00ae9b5-kube-api-access-z2m8z\") pod \"observability-operator-59bdc8b94-t7dvp\" (UID: \"ded8ba3b-7925-430a-a595-93b3b00ae9b5\") " pod="openshift-operators/observability-operator-59bdc8b94-t7dvp" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.375040 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-xv6dg"] Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.376431 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-xv6dg" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.383829 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operators"/"perses-operator-dockercfg-qkx6n" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.392971 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-xv6dg"] Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.456956 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-68bc856cb9-85xxr"] Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.491487 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operators/observability-operator-59bdc8b94-t7dvp" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.541075 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cn8mw\" (UniqueName: \"kubernetes.io/projected/5221e9c8-1162-482b-8120-6dd97c481304-kube-api-access-cn8mw\") pod \"perses-operator-5bf474d74f-xv6dg\" (UID: \"5221e9c8-1162-482b-8120-6dd97c481304\") " pod="openshift-operators/perses-operator-5bf474d74f-xv6dg" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.541157 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/5221e9c8-1162-482b-8120-6dd97c481304-openshift-service-ca\") pod \"perses-operator-5bf474d74f-xv6dg\" (UID: \"5221e9c8-1162-482b-8120-6dd97c481304\") " pod="openshift-operators/perses-operator-5bf474d74f-xv6dg" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.555353 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-xnqrt"] Jan 25 00:18:40 crc kubenswrapper[4985]: W0125 00:18:40.567221 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb1769099_2ab1_4b9b_b373_dc9e096c14d9.slice/crio-5e361ff5973e08171b4d11474bdd8acfd091e340cb2f4d3e9b1e096394600dd3 WatchSource:0}: Error finding container 5e361ff5973e08171b4d11474bdd8acfd091e340cb2f4d3e9b1e096394600dd3: Status 404 returned error can't find the container with id 5e361ff5973e08171b4d11474bdd8acfd091e340cb2f4d3e9b1e096394600dd3 Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.624364 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-q6cxx"] Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.642441 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/5221e9c8-1162-482b-8120-6dd97c481304-openshift-service-ca\") pod \"perses-operator-5bf474d74f-xv6dg\" (UID: \"5221e9c8-1162-482b-8120-6dd97c481304\") " pod="openshift-operators/perses-operator-5bf474d74f-xv6dg" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.642515 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cn8mw\" (UniqueName: \"kubernetes.io/projected/5221e9c8-1162-482b-8120-6dd97c481304-kube-api-access-cn8mw\") pod \"perses-operator-5bf474d74f-xv6dg\" (UID: \"5221e9c8-1162-482b-8120-6dd97c481304\") " pod="openshift-operators/perses-operator-5bf474d74f-xv6dg" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.643315 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openshift-service-ca\" (UniqueName: \"kubernetes.io/configmap/5221e9c8-1162-482b-8120-6dd97c481304-openshift-service-ca\") pod \"perses-operator-5bf474d74f-xv6dg\" (UID: \"5221e9c8-1162-482b-8120-6dd97c481304\") " pod="openshift-operators/perses-operator-5bf474d74f-xv6dg" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.660720 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cn8mw\" (UniqueName: \"kubernetes.io/projected/5221e9c8-1162-482b-8120-6dd97c481304-kube-api-access-cn8mw\") pod \"perses-operator-5bf474d74f-xv6dg\" (UID: \"5221e9c8-1162-482b-8120-6dd97c481304\") " 
pod="openshift-operators/perses-operator-5bf474d74f-xv6dg" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.707644 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operators/perses-operator-5bf474d74f-xv6dg" Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.723303 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/observability-operator-59bdc8b94-t7dvp"] Jan 25 00:18:40 crc kubenswrapper[4985]: W0125 00:18:40.733406 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podded8ba3b_7925_430a_a595_93b3b00ae9b5.slice/crio-1824994127f83648729d83cb4b6d3aeb3fde7d3728f07e92d94394bf4f5c0539 WatchSource:0}: Error finding container 1824994127f83648729d83cb4b6d3aeb3fde7d3728f07e92d94394bf4f5c0539: Status 404 returned error can't find the container with id 1824994127f83648729d83cb4b6d3aeb3fde7d3728f07e92d94394bf4f5c0539 Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.814900 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-xnqrt" event={"ID":"b1769099-2ab1-4b9b-b373-dc9e096c14d9","Type":"ContainerStarted","Data":"5e361ff5973e08171b4d11474bdd8acfd091e340cb2f4d3e9b1e096394600dd3"} Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.820704 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-85xxr" event={"ID":"16badb6b-d0b6-454e-a544-6811966984a6","Type":"ContainerStarted","Data":"dfd3213a4fc56dd699c814a7b3c9ebf6befc8b8046d80945dea809944b35dd07"} Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.821887 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-t7dvp" event={"ID":"ded8ba3b-7925-430a-a595-93b3b00ae9b5","Type":"ContainerStarted","Data":"1824994127f83648729d83cb4b6d3aeb3fde7d3728f07e92d94394bf4f5c0539"} Jan 25 00:18:40 crc kubenswrapper[4985]: I0125 00:18:40.823036 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-q6cxx" event={"ID":"ba5b943c-3070-4afc-b57c-fb0be23f2a32","Type":"ContainerStarted","Data":"6ddc298f566fc1844d9baf91ecc973a8be6e76e72fe885c43d64f3c7d42149a5"} Jan 25 00:18:41 crc kubenswrapper[4985]: I0125 00:18:41.152956 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5" Jan 25 00:18:41 crc kubenswrapper[4985]: I0125 00:18:41.181022 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operators/perses-operator-5bf474d74f-xv6dg"] Jan 25 00:18:41 crc kubenswrapper[4985]: W0125 00:18:41.189971 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5221e9c8_1162_482b_8120_6dd97c481304.slice/crio-70552a0ca5cdb8867b04e3dfe31f9ddaf4315167cc70cb14f9bb63783d69d49e WatchSource:0}: Error finding container 70552a0ca5cdb8867b04e3dfe31f9ddaf4315167cc70cb14f9bb63783d69d49e: Status 404 returned error can't find the container with id 70552a0ca5cdb8867b04e3dfe31f9ddaf4315167cc70cb14f9bb63783d69d49e Jan 25 00:18:41 crc kubenswrapper[4985]: I0125 00:18:41.353746 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a9856c83-fe7c-4653-8d19-114a5b040a73-bundle\") pod \"a9856c83-fe7c-4653-8d19-114a5b040a73\" (UID: \"a9856c83-fe7c-4653-8d19-114a5b040a73\") " Jan 25 00:18:41 crc kubenswrapper[4985]: I0125 00:18:41.353829 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a9856c83-fe7c-4653-8d19-114a5b040a73-util\") pod \"a9856c83-fe7c-4653-8d19-114a5b040a73\" (UID: \"a9856c83-fe7c-4653-8d19-114a5b040a73\") " Jan 25 00:18:41 crc kubenswrapper[4985]: I0125 00:18:41.353860 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pg8b7\" (UniqueName: \"kubernetes.io/projected/a9856c83-fe7c-4653-8d19-114a5b040a73-kube-api-access-pg8b7\") pod \"a9856c83-fe7c-4653-8d19-114a5b040a73\" (UID: \"a9856c83-fe7c-4653-8d19-114a5b040a73\") " Jan 25 00:18:41 crc kubenswrapper[4985]: I0125 00:18:41.355755 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9856c83-fe7c-4653-8d19-114a5b040a73-bundle" (OuterVolumeSpecName: "bundle") pod "a9856c83-fe7c-4653-8d19-114a5b040a73" (UID: "a9856c83-fe7c-4653-8d19-114a5b040a73"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:18:41 crc kubenswrapper[4985]: I0125 00:18:41.357882 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9856c83-fe7c-4653-8d19-114a5b040a73-kube-api-access-pg8b7" (OuterVolumeSpecName: "kube-api-access-pg8b7") pod "a9856c83-fe7c-4653-8d19-114a5b040a73" (UID: "a9856c83-fe7c-4653-8d19-114a5b040a73"). InnerVolumeSpecName "kube-api-access-pg8b7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:18:41 crc kubenswrapper[4985]: I0125 00:18:41.382638 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9856c83-fe7c-4653-8d19-114a5b040a73-util" (OuterVolumeSpecName: "util") pod "a9856c83-fe7c-4653-8d19-114a5b040a73" (UID: "a9856c83-fe7c-4653-8d19-114a5b040a73"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:18:41 crc kubenswrapper[4985]: I0125 00:18:41.454781 4985 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a9856c83-fe7c-4653-8d19-114a5b040a73-util\") on node \"crc\" DevicePath \"\"" Jan 25 00:18:41 crc kubenswrapper[4985]: I0125 00:18:41.454823 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pg8b7\" (UniqueName: \"kubernetes.io/projected/a9856c83-fe7c-4653-8d19-114a5b040a73-kube-api-access-pg8b7\") on node \"crc\" DevicePath \"\"" Jan 25 00:18:41 crc kubenswrapper[4985]: I0125 00:18:41.454835 4985 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a9856c83-fe7c-4653-8d19-114a5b040a73-bundle\") on node \"crc\" DevicePath \"\"" Jan 25 00:18:41 crc kubenswrapper[4985]: I0125 00:18:41.827442 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-xv6dg" event={"ID":"5221e9c8-1162-482b-8120-6dd97c481304","Type":"ContainerStarted","Data":"70552a0ca5cdb8867b04e3dfe31f9ddaf4315167cc70cb14f9bb63783d69d49e"} Jan 25 00:18:41 crc kubenswrapper[4985]: I0125 00:18:41.829299 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5" event={"ID":"a9856c83-fe7c-4653-8d19-114a5b040a73","Type":"ContainerDied","Data":"da30c24409c28c71e9b381c20c65ded482eaa9ebf51af07e5801fe7c1e739f47"} Jan 25 00:18:41 crc kubenswrapper[4985]: I0125 00:18:41.829329 4985 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="da30c24409c28c71e9b381c20c65ded482eaa9ebf51af07e5801fe7c1e739f47" Jan 25 00:18:41 crc kubenswrapper[4985]: I0125 00:18:41.829390 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5" Jan 25 00:18:47 crc kubenswrapper[4985]: I0125 00:18:47.852316 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/elastic-operator-79b7456484-pltwd"] Jan 25 00:18:47 crc kubenswrapper[4985]: E0125 00:18:47.852787 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9856c83-fe7c-4653-8d19-114a5b040a73" containerName="extract" Jan 25 00:18:47 crc kubenswrapper[4985]: I0125 00:18:47.852802 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9856c83-fe7c-4653-8d19-114a5b040a73" containerName="extract" Jan 25 00:18:47 crc kubenswrapper[4985]: E0125 00:18:47.852811 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9856c83-fe7c-4653-8d19-114a5b040a73" containerName="util" Jan 25 00:18:47 crc kubenswrapper[4985]: I0125 00:18:47.852817 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9856c83-fe7c-4653-8d19-114a5b040a73" containerName="util" Jan 25 00:18:47 crc kubenswrapper[4985]: E0125 00:18:47.852831 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9856c83-fe7c-4653-8d19-114a5b040a73" containerName="pull" Jan 25 00:18:47 crc kubenswrapper[4985]: I0125 00:18:47.852838 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9856c83-fe7c-4653-8d19-114a5b040a73" containerName="pull" Jan 25 00:18:47 crc kubenswrapper[4985]: I0125 00:18:47.852964 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9856c83-fe7c-4653-8d19-114a5b040a73" containerName="extract" Jan 25 00:18:47 crc kubenswrapper[4985]: I0125 00:18:47.853413 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elastic-operator-79b7456484-pltwd" Jan 25 00:18:47 crc kubenswrapper[4985]: I0125 00:18:47.857296 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elastic-operator-dockercfg-d8pgx" Jan 25 00:18:47 crc kubenswrapper[4985]: I0125 00:18:47.857355 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elastic-operator-service-cert" Jan 25 00:18:47 crc kubenswrapper[4985]: I0125 00:18:47.857566 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"openshift-service-ca.crt" Jan 25 00:18:47 crc kubenswrapper[4985]: I0125 00:18:47.858034 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"kube-root-ca.crt" Jan 25 00:18:47 crc kubenswrapper[4985]: I0125 00:18:47.867650 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elastic-operator-79b7456484-pltwd"] Jan 25 00:18:47 crc kubenswrapper[4985]: I0125 00:18:47.972259 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e0cc61de-66cc-4674-8ee8-9039337ead4c-webhook-cert\") pod \"elastic-operator-79b7456484-pltwd\" (UID: \"e0cc61de-66cc-4674-8ee8-9039337ead4c\") " pod="service-telemetry/elastic-operator-79b7456484-pltwd" Jan 25 00:18:47 crc kubenswrapper[4985]: I0125 00:18:47.972344 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tmlvb\" (UniqueName: \"kubernetes.io/projected/e0cc61de-66cc-4674-8ee8-9039337ead4c-kube-api-access-tmlvb\") pod \"elastic-operator-79b7456484-pltwd\" (UID: \"e0cc61de-66cc-4674-8ee8-9039337ead4c\") " 
pod="service-telemetry/elastic-operator-79b7456484-pltwd" Jan 25 00:18:47 crc kubenswrapper[4985]: I0125 00:18:47.972403 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e0cc61de-66cc-4674-8ee8-9039337ead4c-apiservice-cert\") pod \"elastic-operator-79b7456484-pltwd\" (UID: \"e0cc61de-66cc-4674-8ee8-9039337ead4c\") " pod="service-telemetry/elastic-operator-79b7456484-pltwd" Jan 25 00:18:48 crc kubenswrapper[4985]: I0125 00:18:48.073344 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tmlvb\" (UniqueName: \"kubernetes.io/projected/e0cc61de-66cc-4674-8ee8-9039337ead4c-kube-api-access-tmlvb\") pod \"elastic-operator-79b7456484-pltwd\" (UID: \"e0cc61de-66cc-4674-8ee8-9039337ead4c\") " pod="service-telemetry/elastic-operator-79b7456484-pltwd" Jan 25 00:18:48 crc kubenswrapper[4985]: I0125 00:18:48.073434 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e0cc61de-66cc-4674-8ee8-9039337ead4c-apiservice-cert\") pod \"elastic-operator-79b7456484-pltwd\" (UID: \"e0cc61de-66cc-4674-8ee8-9039337ead4c\") " pod="service-telemetry/elastic-operator-79b7456484-pltwd" Jan 25 00:18:48 crc kubenswrapper[4985]: I0125 00:18:48.073456 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e0cc61de-66cc-4674-8ee8-9039337ead4c-webhook-cert\") pod \"elastic-operator-79b7456484-pltwd\" (UID: \"e0cc61de-66cc-4674-8ee8-9039337ead4c\") " pod="service-telemetry/elastic-operator-79b7456484-pltwd" Jan 25 00:18:48 crc kubenswrapper[4985]: I0125 00:18:48.079169 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e0cc61de-66cc-4674-8ee8-9039337ead4c-webhook-cert\") pod \"elastic-operator-79b7456484-pltwd\" (UID: \"e0cc61de-66cc-4674-8ee8-9039337ead4c\") " pod="service-telemetry/elastic-operator-79b7456484-pltwd" Jan 25 00:18:48 crc kubenswrapper[4985]: I0125 00:18:48.091985 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e0cc61de-66cc-4674-8ee8-9039337ead4c-apiservice-cert\") pod \"elastic-operator-79b7456484-pltwd\" (UID: \"e0cc61de-66cc-4674-8ee8-9039337ead4c\") " pod="service-telemetry/elastic-operator-79b7456484-pltwd" Jan 25 00:18:48 crc kubenswrapper[4985]: I0125 00:18:48.093207 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tmlvb\" (UniqueName: \"kubernetes.io/projected/e0cc61de-66cc-4674-8ee8-9039337ead4c-kube-api-access-tmlvb\") pod \"elastic-operator-79b7456484-pltwd\" (UID: \"e0cc61de-66cc-4674-8ee8-9039337ead4c\") " pod="service-telemetry/elastic-operator-79b7456484-pltwd" Jan 25 00:18:48 crc kubenswrapper[4985]: I0125 00:18:48.167207 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/elastic-operator-79b7456484-pltwd" Jan 25 00:18:52 crc kubenswrapper[4985]: I0125 00:18:52.530283 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elastic-operator-79b7456484-pltwd"] Jan 25 00:18:52 crc kubenswrapper[4985]: W0125 00:18:52.532279 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode0cc61de_66cc_4674_8ee8_9039337ead4c.slice/crio-e46c70f8e8551b583f3f9f911183309cc0ed81e146a6ee009e4b0f306aac5662 WatchSource:0}: Error finding container e46c70f8e8551b583f3f9f911183309cc0ed81e146a6ee009e4b0f306aac5662: Status 404 returned error can't find the container with id e46c70f8e8551b583f3f9f911183309cc0ed81e146a6ee009e4b0f306aac5662 Jan 25 00:18:52 crc kubenswrapper[4985]: I0125 00:18:52.901836 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-85xxr" event={"ID":"16badb6b-d0b6-454e-a544-6811966984a6","Type":"ContainerStarted","Data":"0b47c0c505e72c102ac61c25ae146a0225e444dd84d9a4511674428da208acd7"} Jan 25 00:18:52 crc kubenswrapper[4985]: I0125 00:18:52.904416 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/observability-operator-59bdc8b94-t7dvp" event={"ID":"ded8ba3b-7925-430a-a595-93b3b00ae9b5","Type":"ContainerStarted","Data":"1e3d8b285c04d7db41248b8ede6cb6b5cbd0ac8ac6092f327cefca792a68ecdf"} Jan 25 00:18:52 crc kubenswrapper[4985]: I0125 00:18:52.905278 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/observability-operator-59bdc8b94-t7dvp" Jan 25 00:18:52 crc kubenswrapper[4985]: I0125 00:18:52.908715 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-q6cxx" event={"ID":"ba5b943c-3070-4afc-b57c-fb0be23f2a32","Type":"ContainerStarted","Data":"2857c65b85587dd38009c1a02a951bdf4ab26d5d825a0d853f8a137e36816b2e"} Jan 25 00:18:52 crc kubenswrapper[4985]: I0125 00:18:52.910518 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elastic-operator-79b7456484-pltwd" event={"ID":"e0cc61de-66cc-4674-8ee8-9039337ead4c","Type":"ContainerStarted","Data":"e46c70f8e8551b583f3f9f911183309cc0ed81e146a6ee009e4b0f306aac5662"} Jan 25 00:18:52 crc kubenswrapper[4985]: I0125 00:18:52.912911 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/perses-operator-5bf474d74f-xv6dg" event={"ID":"5221e9c8-1162-482b-8120-6dd97c481304","Type":"ContainerStarted","Data":"7a455360eaba16dec67c920912cda4afc40545ff8ea8a89cbebdd020bca1d2c0"} Jan 25 00:18:52 crc kubenswrapper[4985]: I0125 00:18:52.913071 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operators/perses-operator-5bf474d74f-xv6dg" Jan 25 00:18:52 crc kubenswrapper[4985]: I0125 00:18:52.914914 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-xnqrt" event={"ID":"b1769099-2ab1-4b9b-b373-dc9e096c14d9","Type":"ContainerStarted","Data":"8378777266c75ad3e30ff0ebf7e7dfe3de7db9d127dd0c9be17fe58d04073ee6"} Jan 25 00:18:52 crc kubenswrapper[4985]: I0125 00:18:52.928944 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-68bc856cb9-85xxr" podStartSLOduration=2.304743178 podStartE2EDuration="13.928920989s" podCreationTimestamp="2026-01-25 00:18:39 +0000 UTC" 
firstStartedPulling="2026-01-25 00:18:40.484833557 +0000 UTC m=+730.516769830" lastFinishedPulling="2026-01-25 00:18:52.109011378 +0000 UTC m=+742.140947641" observedRunningTime="2026-01-25 00:18:52.924413198 +0000 UTC m=+742.956349481" watchObservedRunningTime="2026-01-25 00:18:52.928920989 +0000 UTC m=+742.960857272" Jan 25 00:18:52 crc kubenswrapper[4985]: I0125 00:18:52.953578 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-q6cxx" podStartSLOduration=2.48717662 podStartE2EDuration="13.953560526s" podCreationTimestamp="2026-01-25 00:18:39 +0000 UTC" firstStartedPulling="2026-01-25 00:18:40.636006866 +0000 UTC m=+730.667943139" lastFinishedPulling="2026-01-25 00:18:52.102390782 +0000 UTC m=+742.134327045" observedRunningTime="2026-01-25 00:18:52.951608313 +0000 UTC m=+742.983544606" watchObservedRunningTime="2026-01-25 00:18:52.953560526 +0000 UTC m=+742.985496799" Jan 25 00:18:53 crc kubenswrapper[4985]: I0125 00:18:53.026773 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/observability-operator-59bdc8b94-t7dvp" podStartSLOduration=1.6026308660000002 podStartE2EDuration="13.026749826s" podCreationTimestamp="2026-01-25 00:18:40 +0000 UTC" firstStartedPulling="2026-01-25 00:18:40.736163245 +0000 UTC m=+730.768099518" lastFinishedPulling="2026-01-25 00:18:52.160282205 +0000 UTC m=+742.192218478" observedRunningTime="2026-01-25 00:18:53.021417244 +0000 UTC m=+743.053353537" watchObservedRunningTime="2026-01-25 00:18:53.026749826 +0000 UTC m=+743.058686099" Jan 25 00:18:53 crc kubenswrapper[4985]: I0125 00:18:53.045960 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/obo-prometheus-operator-admission-webhook-954995c76-xnqrt" podStartSLOduration=2.511107928 podStartE2EDuration="14.045941058s" podCreationTimestamp="2026-01-25 00:18:39 +0000 UTC" firstStartedPulling="2026-01-25 00:18:40.570344986 +0000 UTC m=+730.602281259" lastFinishedPulling="2026-01-25 00:18:52.105178116 +0000 UTC m=+742.137114389" observedRunningTime="2026-01-25 00:18:53.044695364 +0000 UTC m=+743.076631657" watchObservedRunningTime="2026-01-25 00:18:53.045941058 +0000 UTC m=+743.077877331" Jan 25 00:18:53 crc kubenswrapper[4985]: I0125 00:18:53.061785 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/observability-operator-59bdc8b94-t7dvp" Jan 25 00:18:53 crc kubenswrapper[4985]: I0125 00:18:53.070746 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operators/perses-operator-5bf474d74f-xv6dg" podStartSLOduration=2.109350251 podStartE2EDuration="13.070722548s" podCreationTimestamp="2026-01-25 00:18:40 +0000 UTC" firstStartedPulling="2026-01-25 00:18:41.192483176 +0000 UTC m=+731.224419439" lastFinishedPulling="2026-01-25 00:18:52.153855463 +0000 UTC m=+742.185791736" observedRunningTime="2026-01-25 00:18:53.066544196 +0000 UTC m=+743.098480479" watchObservedRunningTime="2026-01-25 00:18:53.070722548 +0000 UTC m=+743.102658831" Jan 25 00:18:55 crc kubenswrapper[4985]: I0125 00:18:55.931995 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elastic-operator-79b7456484-pltwd" event={"ID":"e0cc61de-66cc-4674-8ee8-9039337ead4c","Type":"ContainerStarted","Data":"1e78264b976594c62148976ef0fe5d324d6b633d979b872187cf73b5366cf390"} Jan 25 00:18:55 crc kubenswrapper[4985]: I0125 00:18:55.959649 4985 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/elastic-operator-79b7456484-pltwd" podStartSLOduration=6.183771501 podStartE2EDuration="8.959628607s" podCreationTimestamp="2026-01-25 00:18:47 +0000 UTC" firstStartedPulling="2026-01-25 00:18:52.534684522 +0000 UTC m=+742.566620815" lastFinishedPulling="2026-01-25 00:18:55.310541648 +0000 UTC m=+745.342477921" observedRunningTime="2026-01-25 00:18:55.958188498 +0000 UTC m=+745.990124801" watchObservedRunningTime="2026-01-25 00:18:55.959628607 +0000 UTC m=+745.991564880" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.679768 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.681505 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.684949 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-default-es-config" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.685019 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-internal-users" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.685438 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-dockercfg-2fsd5" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.685450 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-default-es-transport-certs" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.685775 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-remote-ca" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.685782 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-http-certs-internal" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.685964 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"elasticsearch-es-scripts" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.685996 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"elasticsearch-es-unicast-hosts" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.686366 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-xpack-file-realm" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.692024 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.692100 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: 
I0125 00:18:56.692165 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.692204 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.692248 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.692280 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/ceb530c6-05c0-4e6f-a0cb-100077e6777e-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.692322 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.692451 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.692530 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/ceb530c6-05c0-4e6f-a0cb-100077e6777e-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.692597 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 
00:18:56.692633 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.692687 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.692721 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.692791 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.692886 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.699228 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.793776 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.793835 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.793863 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " 
pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.793892 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.793918 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/ceb530c6-05c0-4e6f-a0cb-100077e6777e-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.793942 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.793975 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.794006 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/ceb530c6-05c0-4e6f-a0cb-100077e6777e-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.794032 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.794055 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elasticsearch-logs\" (UniqueName: \"kubernetes.io/empty-dir/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.794084 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.794130 4985 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.794154 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.794197 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.794234 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.794604 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elasticsearch-data\" (UniqueName: \"kubernetes.io/empty-dir/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elasticsearch-data\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.794649 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-plugins-local\" (UniqueName: \"kubernetes.io/empty-dir/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-elasticsearch-plugins-local\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.794754 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-config-local\" (UniqueName: \"kubernetes.io/empty-dir/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-elasticsearch-config-local\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.795405 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-bin-local\" (UniqueName: \"kubernetes.io/empty-dir/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-elasticsearch-bin-local\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.795582 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elasticsearch-logs\" (UniqueName: 
\"kubernetes.io/empty-dir/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elasticsearch-logs\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.795835 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp-volume\" (UniqueName: \"kubernetes.io/empty-dir/ceb530c6-05c0-4e6f-a0cb-100077e6777e-tmp-volume\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.796056 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-unicast-hosts\" (UniqueName: \"kubernetes.io/configmap/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-unicast-hosts\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.796363 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-scripts\" (UniqueName: \"kubernetes.io/configmap/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-scripts\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.799563 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-transport-certificates\" (UniqueName: \"kubernetes.io/secret/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-transport-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.799671 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-elasticsearch-config\" (UniqueName: \"kubernetes.io/secret/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-elasticsearch-config\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.799881 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-xpack-file-realm\" (UniqueName: \"kubernetes.io/secret/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-xpack-file-realm\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.800043 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-remote-certificate-authorities\" (UniqueName: \"kubernetes.io/secret/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-remote-certificate-authorities\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.800705 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-http-certificates\" (UniqueName: \"kubernetes.io/secret/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-http-certificates\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " 
pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.800824 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-internal-probe-user\" (UniqueName: \"kubernetes.io/secret/ceb530c6-05c0-4e6f-a0cb-100077e6777e-elastic-internal-probe-user\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:56 crc kubenswrapper[4985]: I0125 00:18:56.811031 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"downward-api\" (UniqueName: \"kubernetes.io/downward-api/ceb530c6-05c0-4e6f-a0cb-100077e6777e-downward-api\") pod \"elasticsearch-es-default-0\" (UID: \"ceb530c6-05c0-4e6f-a0cb-100077e6777e\") " pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:57 crc kubenswrapper[4985]: I0125 00:18:57.006038 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:18:57 crc kubenswrapper[4985]: I0125 00:18:57.258635 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Jan 25 00:18:57 crc kubenswrapper[4985]: I0125 00:18:57.951463 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"ceb530c6-05c0-4e6f-a0cb-100077e6777e","Type":"ContainerStarted","Data":"7c4cc0d3401a3bf5ce222cb95ed114943fff0cfecaa37b1727ff84a35e779484"} Jan 25 00:19:00 crc kubenswrapper[4985]: I0125 00:19:00.357216 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5cj9w"] Jan 25 00:19:00 crc kubenswrapper[4985]: I0125 00:19:00.365777 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5cj9w" Jan 25 00:19:00 crc kubenswrapper[4985]: I0125 00:19:00.367369 4985 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager-operator"/"cert-manager-operator-controller-manager-dockercfg-2xqph" Jan 25 00:19:00 crc kubenswrapper[4985]: I0125 00:19:00.370219 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5cj9w"] Jan 25 00:19:00 crc kubenswrapper[4985]: I0125 00:19:00.371875 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"kube-root-ca.crt" Jan 25 00:19:00 crc kubenswrapper[4985]: I0125 00:19:00.373086 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager-operator"/"openshift-service-ca.crt" Jan 25 00:19:00 crc kubenswrapper[4985]: I0125 00:19:00.440200 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/3b0ad6d6-9957-4e98-b395-0f18a4ae7f95-tmp\") pod \"cert-manager-operator-controller-manager-5446d6888b-5cj9w\" (UID: \"3b0ad6d6-9957-4e98-b395-0f18a4ae7f95\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5cj9w" Jan 25 00:19:00 crc kubenswrapper[4985]: I0125 00:19:00.440251 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nc6hc\" (UniqueName: \"kubernetes.io/projected/3b0ad6d6-9957-4e98-b395-0f18a4ae7f95-kube-api-access-nc6hc\") pod \"cert-manager-operator-controller-manager-5446d6888b-5cj9w\" (UID: \"3b0ad6d6-9957-4e98-b395-0f18a4ae7f95\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5cj9w" Jan 25 00:19:00 crc kubenswrapper[4985]: I0125 00:19:00.541171 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/3b0ad6d6-9957-4e98-b395-0f18a4ae7f95-tmp\") pod \"cert-manager-operator-controller-manager-5446d6888b-5cj9w\" (UID: \"3b0ad6d6-9957-4e98-b395-0f18a4ae7f95\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5cj9w" Jan 25 00:19:00 crc kubenswrapper[4985]: I0125 00:19:00.541222 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nc6hc\" (UniqueName: \"kubernetes.io/projected/3b0ad6d6-9957-4e98-b395-0f18a4ae7f95-kube-api-access-nc6hc\") pod \"cert-manager-operator-controller-manager-5446d6888b-5cj9w\" (UID: \"3b0ad6d6-9957-4e98-b395-0f18a4ae7f95\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5cj9w" Jan 25 00:19:00 crc kubenswrapper[4985]: I0125 00:19:00.541786 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmp\" (UniqueName: \"kubernetes.io/empty-dir/3b0ad6d6-9957-4e98-b395-0f18a4ae7f95-tmp\") pod \"cert-manager-operator-controller-manager-5446d6888b-5cj9w\" (UID: \"3b0ad6d6-9957-4e98-b395-0f18a4ae7f95\") " pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5cj9w" Jan 25 00:19:00 crc kubenswrapper[4985]: I0125 00:19:00.579671 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nc6hc\" (UniqueName: \"kubernetes.io/projected/3b0ad6d6-9957-4e98-b395-0f18a4ae7f95-kube-api-access-nc6hc\") pod \"cert-manager-operator-controller-manager-5446d6888b-5cj9w\" (UID: \"3b0ad6d6-9957-4e98-b395-0f18a4ae7f95\") " 
pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5cj9w" Jan 25 00:19:00 crc kubenswrapper[4985]: I0125 00:19:00.711788 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operators/perses-operator-5bf474d74f-xv6dg" Jan 25 00:19:00 crc kubenswrapper[4985]: I0125 00:19:00.753222 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5cj9w" Jan 25 00:19:01 crc kubenswrapper[4985]: I0125 00:19:01.449940 4985 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Jan 25 00:19:02 crc kubenswrapper[4985]: I0125 00:19:02.142991 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5cj9w"] Jan 25 00:19:03 crc kubenswrapper[4985]: I0125 00:19:03.019925 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5cj9w" event={"ID":"3b0ad6d6-9957-4e98-b395-0f18a4ae7f95","Type":"ContainerStarted","Data":"ad7ecf7879140738c733f282650c7d9a72895c48582aa6179837acb3e6fb943f"} Jan 25 00:19:19 crc kubenswrapper[4985]: I0125 00:19:19.528788 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/infrawatch-operators-smart-gateway-operator-bundle-nightly-head"] Jan 25 00:19:19 crc kubenswrapper[4985]: I0125 00:19:19.531033 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/infrawatch-operators-smart-gateway-operator-bundle-nightly-head" Jan 25 00:19:19 crc kubenswrapper[4985]: I0125 00:19:19.534573 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"smart-gateway-operator-catalog-configmap-partition-1" Jan 25 00:19:19 crc kubenswrapper[4985]: I0125 00:19:19.539217 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-smart-gateway-operator-bundle-nightly-head"] Jan 25 00:19:19 crc kubenswrapper[4985]: I0125 00:19:19.611609 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"smart-gateway-operator-catalog-configmap-partition-1-unzip\" (UniqueName: \"kubernetes.io/empty-dir/094d08d7-b8d8-49dc-a770-9cd9933ffe86-smart-gateway-operator-catalog-configmap-partition-1-unzip\") pod \"infrawatch-operators-smart-gateway-operator-bundle-nightly-head\" (UID: \"094d08d7-b8d8-49dc-a770-9cd9933ffe86\") " pod="service-telemetry/infrawatch-operators-smart-gateway-operator-bundle-nightly-head" Jan 25 00:19:19 crc kubenswrapper[4985]: I0125 00:19:19.611677 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"smart-gateway-operator-catalog-configmap-partition-1-volume\" (UniqueName: \"kubernetes.io/configmap/094d08d7-b8d8-49dc-a770-9cd9933ffe86-smart-gateway-operator-catalog-configmap-partition-1-volume\") pod \"infrawatch-operators-smart-gateway-operator-bundle-nightly-head\" (UID: \"094d08d7-b8d8-49dc-a770-9cd9933ffe86\") " pod="service-telemetry/infrawatch-operators-smart-gateway-operator-bundle-nightly-head" Jan 25 00:19:19 crc kubenswrapper[4985]: I0125 00:19:19.611715 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jd9x2\" (UniqueName: \"kubernetes.io/projected/094d08d7-b8d8-49dc-a770-9cd9933ffe86-kube-api-access-jd9x2\") pod 
\"infrawatch-operators-smart-gateway-operator-bundle-nightly-head\" (UID: \"094d08d7-b8d8-49dc-a770-9cd9933ffe86\") " pod="service-telemetry/infrawatch-operators-smart-gateway-operator-bundle-nightly-head" Jan 25 00:19:19 crc kubenswrapper[4985]: I0125 00:19:19.713951 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"smart-gateway-operator-catalog-configmap-partition-1-unzip\" (UniqueName: \"kubernetes.io/empty-dir/094d08d7-b8d8-49dc-a770-9cd9933ffe86-smart-gateway-operator-catalog-configmap-partition-1-unzip\") pod \"infrawatch-operators-smart-gateway-operator-bundle-nightly-head\" (UID: \"094d08d7-b8d8-49dc-a770-9cd9933ffe86\") " pod="service-telemetry/infrawatch-operators-smart-gateway-operator-bundle-nightly-head" Jan 25 00:19:19 crc kubenswrapper[4985]: I0125 00:19:19.714046 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"smart-gateway-operator-catalog-configmap-partition-1-volume\" (UniqueName: \"kubernetes.io/configmap/094d08d7-b8d8-49dc-a770-9cd9933ffe86-smart-gateway-operator-catalog-configmap-partition-1-volume\") pod \"infrawatch-operators-smart-gateway-operator-bundle-nightly-head\" (UID: \"094d08d7-b8d8-49dc-a770-9cd9933ffe86\") " pod="service-telemetry/infrawatch-operators-smart-gateway-operator-bundle-nightly-head" Jan 25 00:19:19 crc kubenswrapper[4985]: I0125 00:19:19.714110 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jd9x2\" (UniqueName: \"kubernetes.io/projected/094d08d7-b8d8-49dc-a770-9cd9933ffe86-kube-api-access-jd9x2\") pod \"infrawatch-operators-smart-gateway-operator-bundle-nightly-head\" (UID: \"094d08d7-b8d8-49dc-a770-9cd9933ffe86\") " pod="service-telemetry/infrawatch-operators-smart-gateway-operator-bundle-nightly-head" Jan 25 00:19:19 crc kubenswrapper[4985]: I0125 00:19:19.714978 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"smart-gateway-operator-catalog-configmap-partition-1-unzip\" (UniqueName: \"kubernetes.io/empty-dir/094d08d7-b8d8-49dc-a770-9cd9933ffe86-smart-gateway-operator-catalog-configmap-partition-1-unzip\") pod \"infrawatch-operators-smart-gateway-operator-bundle-nightly-head\" (UID: \"094d08d7-b8d8-49dc-a770-9cd9933ffe86\") " pod="service-telemetry/infrawatch-operators-smart-gateway-operator-bundle-nightly-head" Jan 25 00:19:19 crc kubenswrapper[4985]: I0125 00:19:19.715957 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"smart-gateway-operator-catalog-configmap-partition-1-volume\" (UniqueName: \"kubernetes.io/configmap/094d08d7-b8d8-49dc-a770-9cd9933ffe86-smart-gateway-operator-catalog-configmap-partition-1-volume\") pod \"infrawatch-operators-smart-gateway-operator-bundle-nightly-head\" (UID: \"094d08d7-b8d8-49dc-a770-9cd9933ffe86\") " pod="service-telemetry/infrawatch-operators-smart-gateway-operator-bundle-nightly-head" Jan 25 00:19:19 crc kubenswrapper[4985]: I0125 00:19:19.745654 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jd9x2\" (UniqueName: \"kubernetes.io/projected/094d08d7-b8d8-49dc-a770-9cd9933ffe86-kube-api-access-jd9x2\") pod \"infrawatch-operators-smart-gateway-operator-bundle-nightly-head\" (UID: \"094d08d7-b8d8-49dc-a770-9cd9933ffe86\") " pod="service-telemetry/infrawatch-operators-smart-gateway-operator-bundle-nightly-head" Jan 25 00:19:19 crc kubenswrapper[4985]: I0125 00:19:19.854993 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/infrawatch-operators-smart-gateway-operator-bundle-nightly-head" Jan 25 00:19:29 crc kubenswrapper[4985]: E0125 00:19:29.539539 4985 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/cert-manager/cert-manager-operator-rhel9@sha256:fa8de363ab4435c1085ac37f1bad488828c6ae8ba361c5f865c27ef577610911" Jan 25 00:19:29 crc kubenswrapper[4985]: E0125 00:19:29.540254 4985 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cert-manager-operator,Image:registry.redhat.io/cert-manager/cert-manager-operator-rhel9@sha256:fa8de363ab4435c1085ac37f1bad488828c6ae8ba361c5f865c27ef577610911,Command:[/usr/bin/cert-manager-operator],Args:[start --v=$(OPERATOR_LOG_LEVEL) --trusted-ca-configmap=$(TRUSTED_CA_CONFIGMAP_NAME) --cloud-credentials-secret=$(CLOUD_CREDENTIALS_SECRET_NAME) --unsupported-addon-features=$(UNSUPPORTED_ADDON_FEATURES)],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:WATCH_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.annotations['olm.targetNamespaces'],},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:OPERATOR_NAME,Value:cert-manager-operator,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CERT_MANAGER_WEBHOOK,Value:registry.redhat.io/cert-manager/jetstack-cert-manager-rhel9@sha256:29a0fa1c2f2a6cee62a0468a3883d16d491b4af29130dad6e3e2bb2948f274df,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CERT_MANAGER_CA_INJECTOR,Value:registry.redhat.io/cert-manager/jetstack-cert-manager-rhel9@sha256:29a0fa1c2f2a6cee62a0468a3883d16d491b4af29130dad6e3e2bb2948f274df,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CERT_MANAGER_CONTROLLER,Value:registry.redhat.io/cert-manager/jetstack-cert-manager-rhel9@sha256:29a0fa1c2f2a6cee62a0468a3883d16d491b4af29130dad6e3e2bb2948f274df,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CERT_MANAGER_ACMESOLVER,Value:registry.redhat.io/cert-manager/jetstack-cert-manager-acmesolver-rhel9@sha256:ba937fc4b9eee31422914352c11a45b90754ba4fbe490ea45249b90afdc4e0a7,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CERT_MANAGER_ISTIOCSR,Value:registry.redhat.io/cert-manager/cert-manager-istio-csr-rhel9@sha256:af1ac813b8ee414ef215936f05197bc498bccbd540f3e2a93cb522221ba112bc,ValueFrom:nil,},EnvVar{Name:OPERAND_IMAGE_VERSION,Value:1.18.3,ValueFrom:nil,},EnvVar{Name:ISTIOCSR_OPERAND_IMAGE_VERSION,Value:0.14.2,ValueFrom:nil,},EnvVar{Name:OPERATOR_IMAGE_VERSION,Value:1.18.0,ValueFrom:nil,},EnvVar{Name:OPERATOR_LOG_LEVEL,Value:2,ValueFrom:nil,},EnvVar{Name:TRUSTED_CA_CONFIGMAP_NAME,Value:,ValueFrom:nil,},EnvVar{Name:CLOUD_CREDENTIALS_SECRET_NAME,Value:,ValueFrom:nil,},EnvVar{Name:UNSUPPORTED_ADDON_FEATURES,Value:,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:cert-manager-operator.v1.18.0,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{33554432 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:tmp,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nc6hc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:*false,SELinuxOptions:nil,RunAsUser:*1000680000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cert-manager-operator-controller-manager-5446d6888b-5cj9w_cert-manager-operator(3b0ad6d6-9957-4e98-b395-0f18a4ae7f95): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 25 00:19:29 crc kubenswrapper[4985]: E0125 00:19:29.549624 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cert-manager-operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5cj9w" podUID="3b0ad6d6-9957-4e98-b395-0f18a4ae7f95" Jan 25 00:19:29 crc kubenswrapper[4985]: E0125 00:19:29.952432 4985 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="registry.connect.redhat.com/elastic/elasticsearch:7.17.20" Jan 25 00:19:29 crc kubenswrapper[4985]: E0125 00:19:29.953026 4985 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:elastic-internal-init-filesystem,Image:registry.connect.redhat.com/elastic/elasticsearch:7.17.20,Command:[bash -c 
/mnt/elastic-internal/scripts/prepare-fs.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:NODE_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:spec.nodeName,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:HEADLESS_SERVICE_NAME,Value:elasticsearch-es-default,ValueFrom:nil,},EnvVar{Name:PROBE_PASSWORD_PATH,Value:/mnt/elastic-internal/pod-mounted-users/elastic-internal-probe,ValueFrom:nil,},EnvVar{Name:PROBE_USERNAME,Value:elastic-internal-probe,ValueFrom:nil,},EnvVar{Name:READINESS_PROBE_PROTOCOL,Value:https,ValueFrom:nil,},EnvVar{Name:NSS_SDB_USE_CACHE,Value:no,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Requests:ResourceList{cpu: {{100 -3} {} 100m DecimalSI},memory: {{52428800 0} {} 50Mi BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:downward-api,ReadOnly:true,MountPath:/mnt/elastic-internal/downward-api,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-elasticsearch-bin-local,ReadOnly:false,MountPath:/mnt/elastic-internal/elasticsearch-bin-local,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-elasticsearch-config,ReadOnly:true,MountPath:/mnt/elastic-internal/elasticsearch-config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-elasticsearch-config-local,ReadOnly:false,MountPath:/mnt/elastic-internal/elasticsearch-config-local,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-elasticsearch-plugins-local,ReadOnly:false,MountPath:/mnt/elastic-internal/elasticsearch-plugins-local,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-http-certificates,ReadOnly:true,MountPath:/usr/share/elasticsearch/config/http-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-probe-user,ReadOnly:true,MountPath:/mnt/elastic-internal/pod-mounted-users,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-remote-certificate-authorities,ReadOnly:true,MountPath:/usr/share/elasticsearch/config/transport-remote-certs/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-scripts,ReadOnly:true,MountPath:/mnt/elastic-internal/scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-transport-certificates,ReadOnly:true,MountPath:/mnt/elastic-internal/transport-certificates,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elastic-internal-unicast-hosts,ReadOnly:true,MountPath:/mnt/elastic-internal/unicast-hosts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadO
nly:nil,},VolumeMount{Name:elastic-internal-xpack-file-realm,ReadOnly:true,MountPath:/mnt/elastic-internal/xpack-file-realm,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elasticsearch-data,ReadOnly:false,MountPath:/usr/share/elasticsearch/data,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:elasticsearch-logs,ReadOnly:false,MountPath:/usr/share/elasticsearch/logs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:tmp-volume,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:*false,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod elasticsearch-es-default-0_service-telemetry(ceb530c6-05c0-4e6f-a0cb-100077e6777e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 25 00:19:29 crc kubenswrapper[4985]: E0125 00:19:29.954364 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"elastic-internal-init-filesystem\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/elasticsearch-es-default-0" podUID="ceb530c6-05c0-4e6f-a0cb-100077e6777e" Jan 25 00:19:30 crc kubenswrapper[4985]: I0125 00:19:30.172599 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/infrawatch-operators-smart-gateway-operator-bundle-nightly-head"] Jan 25 00:19:30 crc kubenswrapper[4985]: I0125 00:19:30.392038 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-smart-gateway-operator-bundle-nightly-head" event={"ID":"094d08d7-b8d8-49dc-a770-9cd9933ffe86","Type":"ContainerStarted","Data":"b6514836ad0eeff5ed9b31992b29637671947b6d9ea115ee8cd0b06884726996"} Jan 25 00:19:30 crc kubenswrapper[4985]: E0125 00:19:30.394664 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"elastic-internal-init-filesystem\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/elasticsearch:7.17.20\\\"\"" pod="service-telemetry/elasticsearch-es-default-0" podUID="ceb530c6-05c0-4e6f-a0cb-100077e6777e" Jan 25 00:19:30 crc kubenswrapper[4985]: E0125 00:19:30.394889 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cert-manager-operator\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/cert-manager/cert-manager-operator-rhel9@sha256:fa8de363ab4435c1085ac37f1bad488828c6ae8ba361c5f865c27ef577610911\\\"\"" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5cj9w" podUID="3b0ad6d6-9957-4e98-b395-0f18a4ae7f95" Jan 25 00:19:30 crc kubenswrapper[4985]: I0125 00:19:30.592777 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Jan 25 00:19:30 crc kubenswrapper[4985]: I0125 
00:19:30.633674 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/elasticsearch-es-default-0"] Jan 25 00:19:31 crc kubenswrapper[4985]: E0125 00:19:31.400193 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"elastic-internal-init-filesystem\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/elasticsearch:7.17.20\\\"\"" pod="service-telemetry/elasticsearch-es-default-0" podUID="ceb530c6-05c0-4e6f-a0cb-100077e6777e" Jan 25 00:19:32 crc kubenswrapper[4985]: E0125 00:19:32.406830 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"elastic-internal-init-filesystem\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.connect.redhat.com/elastic/elasticsearch:7.17.20\\\"\"" pod="service-telemetry/elasticsearch-es-default-0" podUID="ceb530c6-05c0-4e6f-a0cb-100077e6777e" Jan 25 00:19:35 crc kubenswrapper[4985]: I0125 00:19:35.836627 4985 patch_prober.go:28] interesting pod/machine-config-daemon-dddxc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 25 00:19:35 crc kubenswrapper[4985]: I0125 00:19:35.837371 4985 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 25 00:19:36 crc kubenswrapper[4985]: I0125 00:19:36.426510 4985 generic.go:334] "Generic (PLEG): container finished" podID="094d08d7-b8d8-49dc-a770-9cd9933ffe86" containerID="d235df724354ab130516d3337a1848530565d98c9f97e529b923c1135b5c6586" exitCode=0 Jan 25 00:19:36 crc kubenswrapper[4985]: I0125 00:19:36.426603 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-smart-gateway-operator-bundle-nightly-head" event={"ID":"094d08d7-b8d8-49dc-a770-9cd9933ffe86","Type":"ContainerDied","Data":"d235df724354ab130516d3337a1848530565d98c9f97e529b923c1135b5c6586"} Jan 25 00:19:39 crc kubenswrapper[4985]: I0125 00:19:39.449475 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/infrawatch-operators-smart-gateway-operator-bundle-nightly-head" event={"ID":"094d08d7-b8d8-49dc-a770-9cd9933ffe86","Type":"ContainerStarted","Data":"7e32961d2bb9da6c0359bf83924e9cbeac9f4dff87d58fe261b277333adc71de"} Jan 25 00:19:39 crc kubenswrapper[4985]: I0125 00:19:39.478476 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/infrawatch-operators-smart-gateway-operator-bundle-nightly-head" podStartSLOduration=11.692327752 podStartE2EDuration="20.47845103s" podCreationTimestamp="2026-01-25 00:19:19 +0000 UTC" firstStartedPulling="2026-01-25 00:19:30.18584596 +0000 UTC m=+780.217782273" lastFinishedPulling="2026-01-25 00:19:38.971969288 +0000 UTC m=+789.003905551" observedRunningTime="2026-01-25 00:19:39.472351087 +0000 UTC m=+789.504287370" watchObservedRunningTime="2026-01-25 00:19:39.47845103 +0000 UTC m=+789.510387333" Jan 25 00:19:40 crc kubenswrapper[4985]: I0125 00:19:40.201716 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns"] Jan 25 00:19:40 crc 
kubenswrapper[4985]: I0125 00:19:40.202777 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns" Jan 25 00:19:40 crc kubenswrapper[4985]: I0125 00:19:40.226537 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns"] Jan 25 00:19:40 crc kubenswrapper[4985]: I0125 00:19:40.307847 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/07dd5f3f-f86a-4ea1-8917-c113d72aad8f-util\") pod \"581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns\" (UID: \"07dd5f3f-f86a-4ea1-8917-c113d72aad8f\") " pod="service-telemetry/581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns" Jan 25 00:19:40 crc kubenswrapper[4985]: I0125 00:19:40.308097 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/07dd5f3f-f86a-4ea1-8917-c113d72aad8f-bundle\") pod \"581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns\" (UID: \"07dd5f3f-f86a-4ea1-8917-c113d72aad8f\") " pod="service-telemetry/581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns" Jan 25 00:19:40 crc kubenswrapper[4985]: I0125 00:19:40.308248 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmlsl\" (UniqueName: \"kubernetes.io/projected/07dd5f3f-f86a-4ea1-8917-c113d72aad8f-kube-api-access-gmlsl\") pod \"581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns\" (UID: \"07dd5f3f-f86a-4ea1-8917-c113d72aad8f\") " pod="service-telemetry/581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns" Jan 25 00:19:40 crc kubenswrapper[4985]: I0125 00:19:40.409405 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/07dd5f3f-f86a-4ea1-8917-c113d72aad8f-util\") pod \"581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns\" (UID: \"07dd5f3f-f86a-4ea1-8917-c113d72aad8f\") " pod="service-telemetry/581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns" Jan 25 00:19:40 crc kubenswrapper[4985]: I0125 00:19:40.409480 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/07dd5f3f-f86a-4ea1-8917-c113d72aad8f-bundle\") pod \"581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns\" (UID: \"07dd5f3f-f86a-4ea1-8917-c113d72aad8f\") " pod="service-telemetry/581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns" Jan 25 00:19:40 crc kubenswrapper[4985]: I0125 00:19:40.409548 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmlsl\" (UniqueName: \"kubernetes.io/projected/07dd5f3f-f86a-4ea1-8917-c113d72aad8f-kube-api-access-gmlsl\") pod \"581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns\" (UID: \"07dd5f3f-f86a-4ea1-8917-c113d72aad8f\") " pod="service-telemetry/581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns" Jan 25 00:19:40 crc kubenswrapper[4985]: I0125 00:19:40.410248 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/07dd5f3f-f86a-4ea1-8917-c113d72aad8f-util\") pod \"581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns\" (UID: 
\"07dd5f3f-f86a-4ea1-8917-c113d72aad8f\") " pod="service-telemetry/581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns" Jan 25 00:19:40 crc kubenswrapper[4985]: I0125 00:19:40.410318 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/07dd5f3f-f86a-4ea1-8917-c113d72aad8f-bundle\") pod \"581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns\" (UID: \"07dd5f3f-f86a-4ea1-8917-c113d72aad8f\") " pod="service-telemetry/581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns" Jan 25 00:19:40 crc kubenswrapper[4985]: I0125 00:19:40.432046 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmlsl\" (UniqueName: \"kubernetes.io/projected/07dd5f3f-f86a-4ea1-8917-c113d72aad8f-kube-api-access-gmlsl\") pod \"581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns\" (UID: \"07dd5f3f-f86a-4ea1-8917-c113d72aad8f\") " pod="service-telemetry/581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns" Jan 25 00:19:40 crc kubenswrapper[4985]: I0125 00:19:40.523726 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns" Jan 25 00:19:40 crc kubenswrapper[4985]: I0125 00:19:40.804041 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns"] Jan 25 00:19:41 crc kubenswrapper[4985]: I0125 00:19:41.466016 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns" event={"ID":"07dd5f3f-f86a-4ea1-8917-c113d72aad8f","Type":"ContainerStarted","Data":"1d8e964b2ea8db96d98205c49f80dfa12541bd803808abe160fd868d2f745bd4"} Jan 25 00:19:42 crc kubenswrapper[4985]: I0125 00:19:42.473682 4985 generic.go:334] "Generic (PLEG): container finished" podID="07dd5f3f-f86a-4ea1-8917-c113d72aad8f" containerID="f690ff8421d1385a3ef7a630350f2d7c5e35fec93a09df4c303a06e96b2aff9e" exitCode=0 Jan 25 00:19:42 crc kubenswrapper[4985]: I0125 00:19:42.473741 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns" event={"ID":"07dd5f3f-f86a-4ea1-8917-c113d72aad8f","Type":"ContainerDied","Data":"f690ff8421d1385a3ef7a630350f2d7c5e35fec93a09df4c303a06e96b2aff9e"} Jan 25 00:19:43 crc kubenswrapper[4985]: I0125 00:19:43.480727 4985 generic.go:334] "Generic (PLEG): container finished" podID="07dd5f3f-f86a-4ea1-8917-c113d72aad8f" containerID="2100a0ff761d592b97fa70dee4358d080f14eb9e93463df4d450037598e002e8" exitCode=0 Jan 25 00:19:43 crc kubenswrapper[4985]: I0125 00:19:43.480780 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns" event={"ID":"07dd5f3f-f86a-4ea1-8917-c113d72aad8f","Type":"ContainerDied","Data":"2100a0ff761d592b97fa70dee4358d080f14eb9e93463df4d450037598e002e8"} Jan 25 00:19:44 crc kubenswrapper[4985]: I0125 00:19:44.491272 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5cj9w" event={"ID":"3b0ad6d6-9957-4e98-b395-0f18a4ae7f95","Type":"ContainerStarted","Data":"ef77931e7774e2c4ece1b492863cae9b5e5366044394a3b725ecd7d530f6cf6d"} Jan 25 00:19:44 crc kubenswrapper[4985]: I0125 00:19:44.494393 4985 generic.go:334] "Generic (PLEG): container finished" 
podID="07dd5f3f-f86a-4ea1-8917-c113d72aad8f" containerID="281acf40bb3e46df03704ec4258ab56d9b9bdfb785c66693739ea849ec9ea2f5" exitCode=0 Jan 25 00:19:44 crc kubenswrapper[4985]: I0125 00:19:44.494555 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns" event={"ID":"07dd5f3f-f86a-4ea1-8917-c113d72aad8f","Type":"ContainerDied","Data":"281acf40bb3e46df03704ec4258ab56d9b9bdfb785c66693739ea849ec9ea2f5"} Jan 25 00:19:44 crc kubenswrapper[4985]: I0125 00:19:44.528373 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager-operator/cert-manager-operator-controller-manager-5446d6888b-5cj9w" podStartSLOduration=2.428835413 podStartE2EDuration="44.5283473s" podCreationTimestamp="2026-01-25 00:19:00 +0000 UTC" firstStartedPulling="2026-01-25 00:19:02.152259137 +0000 UTC m=+752.184195410" lastFinishedPulling="2026-01-25 00:19:44.251771014 +0000 UTC m=+794.283707297" observedRunningTime="2026-01-25 00:19:44.522661269 +0000 UTC m=+794.554597572" watchObservedRunningTime="2026-01-25 00:19:44.5283473 +0000 UTC m=+794.560283603" Jan 25 00:19:45 crc kubenswrapper[4985]: I0125 00:19:45.774722 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns" Jan 25 00:19:45 crc kubenswrapper[4985]: I0125 00:19:45.890233 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gmlsl\" (UniqueName: \"kubernetes.io/projected/07dd5f3f-f86a-4ea1-8917-c113d72aad8f-kube-api-access-gmlsl\") pod \"07dd5f3f-f86a-4ea1-8917-c113d72aad8f\" (UID: \"07dd5f3f-f86a-4ea1-8917-c113d72aad8f\") " Jan 25 00:19:45 crc kubenswrapper[4985]: I0125 00:19:45.890326 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/07dd5f3f-f86a-4ea1-8917-c113d72aad8f-bundle\") pod \"07dd5f3f-f86a-4ea1-8917-c113d72aad8f\" (UID: \"07dd5f3f-f86a-4ea1-8917-c113d72aad8f\") " Jan 25 00:19:45 crc kubenswrapper[4985]: I0125 00:19:45.890348 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/07dd5f3f-f86a-4ea1-8917-c113d72aad8f-util\") pod \"07dd5f3f-f86a-4ea1-8917-c113d72aad8f\" (UID: \"07dd5f3f-f86a-4ea1-8917-c113d72aad8f\") " Jan 25 00:19:45 crc kubenswrapper[4985]: I0125 00:19:45.891718 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07dd5f3f-f86a-4ea1-8917-c113d72aad8f-bundle" (OuterVolumeSpecName: "bundle") pod "07dd5f3f-f86a-4ea1-8917-c113d72aad8f" (UID: "07dd5f3f-f86a-4ea1-8917-c113d72aad8f"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:19:45 crc kubenswrapper[4985]: I0125 00:19:45.896762 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/07dd5f3f-f86a-4ea1-8917-c113d72aad8f-kube-api-access-gmlsl" (OuterVolumeSpecName: "kube-api-access-gmlsl") pod "07dd5f3f-f86a-4ea1-8917-c113d72aad8f" (UID: "07dd5f3f-f86a-4ea1-8917-c113d72aad8f"). InnerVolumeSpecName "kube-api-access-gmlsl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:19:45 crc kubenswrapper[4985]: I0125 00:19:45.903861 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/07dd5f3f-f86a-4ea1-8917-c113d72aad8f-util" (OuterVolumeSpecName: "util") pod "07dd5f3f-f86a-4ea1-8917-c113d72aad8f" (UID: "07dd5f3f-f86a-4ea1-8917-c113d72aad8f"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:19:45 crc kubenswrapper[4985]: I0125 00:19:45.991926 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gmlsl\" (UniqueName: \"kubernetes.io/projected/07dd5f3f-f86a-4ea1-8917-c113d72aad8f-kube-api-access-gmlsl\") on node \"crc\" DevicePath \"\"" Jan 25 00:19:45 crc kubenswrapper[4985]: I0125 00:19:45.991984 4985 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/07dd5f3f-f86a-4ea1-8917-c113d72aad8f-bundle\") on node \"crc\" DevicePath \"\"" Jan 25 00:19:45 crc kubenswrapper[4985]: I0125 00:19:45.992005 4985 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/07dd5f3f-f86a-4ea1-8917-c113d72aad8f-util\") on node \"crc\" DevicePath \"\"" Jan 25 00:19:46 crc kubenswrapper[4985]: I0125 00:19:46.505528 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns" event={"ID":"07dd5f3f-f86a-4ea1-8917-c113d72aad8f","Type":"ContainerDied","Data":"1d8e964b2ea8db96d98205c49f80dfa12541bd803808abe160fd868d2f745bd4"} Jan 25 00:19:46 crc kubenswrapper[4985]: I0125 00:19:46.505569 4985 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1d8e964b2ea8db96d98205c49f80dfa12541bd803808abe160fd868d2f745bd4" Jan 25 00:19:46 crc kubenswrapper[4985]: I0125 00:19:46.505636 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/581064c273eeb770c9fbc3e03ee675cb542f06b12d97607b3aad9766615c6ns" Jan 25 00:19:48 crc kubenswrapper[4985]: I0125 00:19:48.518280 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"ceb530c6-05c0-4e6f-a0cb-100077e6777e","Type":"ContainerStarted","Data":"84e49c7c69beab50670f85684d7d4c65fb82391fdc801867c9af28e85623e8eb"} Jan 25 00:19:49 crc kubenswrapper[4985]: I0125 00:19:49.196811 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-5kq2z"] Jan 25 00:19:49 crc kubenswrapper[4985]: E0125 00:19:49.197120 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07dd5f3f-f86a-4ea1-8917-c113d72aad8f" containerName="util" Jan 25 00:19:49 crc kubenswrapper[4985]: I0125 00:19:49.197140 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="07dd5f3f-f86a-4ea1-8917-c113d72aad8f" containerName="util" Jan 25 00:19:49 crc kubenswrapper[4985]: E0125 00:19:49.197157 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07dd5f3f-f86a-4ea1-8917-c113d72aad8f" containerName="pull" Jan 25 00:19:49 crc kubenswrapper[4985]: I0125 00:19:49.197165 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="07dd5f3f-f86a-4ea1-8917-c113d72aad8f" containerName="pull" Jan 25 00:19:49 crc kubenswrapper[4985]: E0125 00:19:49.197183 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="07dd5f3f-f86a-4ea1-8917-c113d72aad8f" containerName="extract" Jan 25 00:19:49 crc kubenswrapper[4985]: I0125 00:19:49.197191 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="07dd5f3f-f86a-4ea1-8917-c113d72aad8f" containerName="extract" Jan 25 00:19:49 crc kubenswrapper[4985]: I0125 00:19:49.197309 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="07dd5f3f-f86a-4ea1-8917-c113d72aad8f" containerName="extract" Jan 25 00:19:49 crc kubenswrapper[4985]: I0125 00:19:49.197819 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-5kq2z" Jan 25 00:19:49 crc kubenswrapper[4985]: I0125 00:19:49.203254 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-5kq2z"] Jan 25 00:19:49 crc kubenswrapper[4985]: I0125 00:19:49.204145 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Jan 25 00:19:49 crc kubenswrapper[4985]: I0125 00:19:49.204299 4985 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-qvww2" Jan 25 00:19:49 crc kubenswrapper[4985]: I0125 00:19:49.204338 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Jan 25 00:19:49 crc kubenswrapper[4985]: I0125 00:19:49.234261 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a5f5a37d-d901-4929-90c0-91d99f3cd65b-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-5kq2z\" (UID: \"a5f5a37d-d901-4929-90c0-91d99f3cd65b\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-5kq2z" Jan 25 00:19:49 crc kubenswrapper[4985]: I0125 00:19:49.234300 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4r57\" (UniqueName: \"kubernetes.io/projected/a5f5a37d-d901-4929-90c0-91d99f3cd65b-kube-api-access-r4r57\") pod \"cert-manager-webhook-f4fb5df64-5kq2z\" (UID: \"a5f5a37d-d901-4929-90c0-91d99f3cd65b\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-5kq2z" Jan 25 00:19:49 crc kubenswrapper[4985]: I0125 00:19:49.334881 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a5f5a37d-d901-4929-90c0-91d99f3cd65b-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-5kq2z\" (UID: \"a5f5a37d-d901-4929-90c0-91d99f3cd65b\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-5kq2z" Jan 25 00:19:49 crc kubenswrapper[4985]: I0125 00:19:49.334923 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4r57\" (UniqueName: \"kubernetes.io/projected/a5f5a37d-d901-4929-90c0-91d99f3cd65b-kube-api-access-r4r57\") pod \"cert-manager-webhook-f4fb5df64-5kq2z\" (UID: \"a5f5a37d-d901-4929-90c0-91d99f3cd65b\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-5kq2z" Jan 25 00:19:49 crc kubenswrapper[4985]: I0125 00:19:49.352490 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a5f5a37d-d901-4929-90c0-91d99f3cd65b-bound-sa-token\") pod \"cert-manager-webhook-f4fb5df64-5kq2z\" (UID: \"a5f5a37d-d901-4929-90c0-91d99f3cd65b\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-5kq2z" Jan 25 00:19:49 crc kubenswrapper[4985]: I0125 00:19:49.374825 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4r57\" (UniqueName: \"kubernetes.io/projected/a5f5a37d-d901-4929-90c0-91d99f3cd65b-kube-api-access-r4r57\") pod \"cert-manager-webhook-f4fb5df64-5kq2z\" (UID: \"a5f5a37d-d901-4929-90c0-91d99f3cd65b\") " pod="cert-manager/cert-manager-webhook-f4fb5df64-5kq2z" Jan 25 00:19:49 crc kubenswrapper[4985]: I0125 00:19:49.511281 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-webhook-f4fb5df64-5kq2z" Jan 25 00:19:49 crc kubenswrapper[4985]: I0125 00:19:49.541174 4985 generic.go:334] "Generic (PLEG): container finished" podID="ceb530c6-05c0-4e6f-a0cb-100077e6777e" containerID="84e49c7c69beab50670f85684d7d4c65fb82391fdc801867c9af28e85623e8eb" exitCode=0 Jan 25 00:19:49 crc kubenswrapper[4985]: I0125 00:19:49.541214 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"ceb530c6-05c0-4e6f-a0cb-100077e6777e","Type":"ContainerDied","Data":"84e49c7c69beab50670f85684d7d4c65fb82391fdc801867c9af28e85623e8eb"} Jan 25 00:19:49 crc kubenswrapper[4985]: I0125 00:19:49.866882 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-f4fb5df64-5kq2z"] Jan 25 00:19:50 crc kubenswrapper[4985]: I0125 00:19:50.164392 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-z9zrf"] Jan 25 00:19:50 crc kubenswrapper[4985]: I0125 00:19:50.165376 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-z9zrf" Jan 25 00:19:50 crc kubenswrapper[4985]: I0125 00:19:50.169252 4985 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-69nbr" Jan 25 00:19:50 crc kubenswrapper[4985]: I0125 00:19:50.179650 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-z9zrf"] Jan 25 00:19:50 crc kubenswrapper[4985]: I0125 00:19:50.349961 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1e15f504-62d6-4982-9729-34f3bbd5f784-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-z9zrf\" (UID: \"1e15f504-62d6-4982-9729-34f3bbd5f784\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-z9zrf" Jan 25 00:19:50 crc kubenswrapper[4985]: I0125 00:19:50.350305 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dl5r9\" (UniqueName: \"kubernetes.io/projected/1e15f504-62d6-4982-9729-34f3bbd5f784-kube-api-access-dl5r9\") pod \"cert-manager-cainjector-855d9ccff4-z9zrf\" (UID: \"1e15f504-62d6-4982-9729-34f3bbd5f784\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-z9zrf" Jan 25 00:19:50 crc kubenswrapper[4985]: I0125 00:19:50.452977 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1e15f504-62d6-4982-9729-34f3bbd5f784-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-z9zrf\" (UID: \"1e15f504-62d6-4982-9729-34f3bbd5f784\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-z9zrf" Jan 25 00:19:50 crc kubenswrapper[4985]: I0125 00:19:50.453072 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dl5r9\" (UniqueName: \"kubernetes.io/projected/1e15f504-62d6-4982-9729-34f3bbd5f784-kube-api-access-dl5r9\") pod \"cert-manager-cainjector-855d9ccff4-z9zrf\" (UID: \"1e15f504-62d6-4982-9729-34f3bbd5f784\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-z9zrf" Jan 25 00:19:50 crc kubenswrapper[4985]: I0125 00:19:50.485095 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dl5r9\" (UniqueName: 
\"kubernetes.io/projected/1e15f504-62d6-4982-9729-34f3bbd5f784-kube-api-access-dl5r9\") pod \"cert-manager-cainjector-855d9ccff4-z9zrf\" (UID: \"1e15f504-62d6-4982-9729-34f3bbd5f784\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-z9zrf" Jan 25 00:19:50 crc kubenswrapper[4985]: I0125 00:19:50.486523 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/1e15f504-62d6-4982-9729-34f3bbd5f784-bound-sa-token\") pod \"cert-manager-cainjector-855d9ccff4-z9zrf\" (UID: \"1e15f504-62d6-4982-9729-34f3bbd5f784\") " pod="cert-manager/cert-manager-cainjector-855d9ccff4-z9zrf" Jan 25 00:19:50 crc kubenswrapper[4985]: I0125 00:19:50.549246 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-5kq2z" event={"ID":"a5f5a37d-d901-4929-90c0-91d99f3cd65b","Type":"ContainerStarted","Data":"b4c690ff84acb6ef4c928095e189850718e46307c8ccde7cadfe15ae351e4514"} Jan 25 00:19:50 crc kubenswrapper[4985]: I0125 00:19:50.550948 4985 generic.go:334] "Generic (PLEG): container finished" podID="ceb530c6-05c0-4e6f-a0cb-100077e6777e" containerID="1d79081be984c8a88d156a3082a41a30b7de371776e17c2cc6284b1e0e50596b" exitCode=0 Jan 25 00:19:50 crc kubenswrapper[4985]: I0125 00:19:50.550977 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"ceb530c6-05c0-4e6f-a0cb-100077e6777e","Type":"ContainerDied","Data":"1d79081be984c8a88d156a3082a41a30b7de371776e17c2cc6284b1e0e50596b"} Jan 25 00:19:50 crc kubenswrapper[4985]: I0125 00:19:50.781654 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-855d9ccff4-z9zrf" Jan 25 00:19:51 crc kubenswrapper[4985]: I0125 00:19:51.221088 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-855d9ccff4-z9zrf"] Jan 25 00:19:51 crc kubenswrapper[4985]: W0125 00:19:51.233151 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1e15f504_62d6_4982_9729_34f3bbd5f784.slice/crio-9a0903b1e8a22439a41d2d1a3bdee1c6d9fd477b480e0349007b7016125124d6 WatchSource:0}: Error finding container 9a0903b1e8a22439a41d2d1a3bdee1c6d9fd477b480e0349007b7016125124d6: Status 404 returned error can't find the container with id 9a0903b1e8a22439a41d2d1a3bdee1c6d9fd477b480e0349007b7016125124d6 Jan 25 00:19:51 crc kubenswrapper[4985]: I0125 00:19:51.568638 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-z9zrf" event={"ID":"1e15f504-62d6-4982-9729-34f3bbd5f784","Type":"ContainerStarted","Data":"9a0903b1e8a22439a41d2d1a3bdee1c6d9fd477b480e0349007b7016125124d6"} Jan 25 00:19:51 crc kubenswrapper[4985]: I0125 00:19:51.572325 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/elasticsearch-es-default-0" event={"ID":"ceb530c6-05c0-4e6f-a0cb-100077e6777e","Type":"ContainerStarted","Data":"f1c8d3de8714bbebba97c580972c49ccbcc3047cf3424b3d878be62cfb1f735a"} Jan 25 00:19:51 crc kubenswrapper[4985]: I0125 00:19:51.573265 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:19:53 crc kubenswrapper[4985]: I0125 00:19:53.021307 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/elasticsearch-es-default-0" podStartSLOduration=6.90110412 
podStartE2EDuration="57.021285817s" podCreationTimestamp="2026-01-25 00:18:56 +0000 UTC" firstStartedPulling="2026-01-25 00:18:57.278010991 +0000 UTC m=+747.309947274" lastFinishedPulling="2026-01-25 00:19:47.398192698 +0000 UTC m=+797.430128971" observedRunningTime="2026-01-25 00:19:51.61326011 +0000 UTC m=+801.645196403" watchObservedRunningTime="2026-01-25 00:19:53.021285817 +0000 UTC m=+803.053222090" Jan 25 00:19:53 crc kubenswrapper[4985]: I0125 00:19:53.023539 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/smart-gateway-operator-bbbc889bc-wnp6h"] Jan 25 00:19:53 crc kubenswrapper[4985]: I0125 00:19:53.024222 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-bbbc889bc-wnp6h" Jan 25 00:19:53 crc kubenswrapper[4985]: I0125 00:19:53.031931 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-operator-dockercfg-pszwd" Jan 25 00:19:53 crc kubenswrapper[4985]: I0125 00:19:53.044306 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-bbbc889bc-wnp6h"] Jan 25 00:19:53 crc kubenswrapper[4985]: I0125 00:19:53.093486 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/1f53aed3-9734-4492-bd91-ba42c17ae773-runner\") pod \"smart-gateway-operator-bbbc889bc-wnp6h\" (UID: \"1f53aed3-9734-4492-bd91-ba42c17ae773\") " pod="service-telemetry/smart-gateway-operator-bbbc889bc-wnp6h" Jan 25 00:19:53 crc kubenswrapper[4985]: I0125 00:19:53.093606 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dd5fg\" (UniqueName: \"kubernetes.io/projected/1f53aed3-9734-4492-bd91-ba42c17ae773-kube-api-access-dd5fg\") pod \"smart-gateway-operator-bbbc889bc-wnp6h\" (UID: \"1f53aed3-9734-4492-bd91-ba42c17ae773\") " pod="service-telemetry/smart-gateway-operator-bbbc889bc-wnp6h" Jan 25 00:19:53 crc kubenswrapper[4985]: I0125 00:19:53.195949 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/1f53aed3-9734-4492-bd91-ba42c17ae773-runner\") pod \"smart-gateway-operator-bbbc889bc-wnp6h\" (UID: \"1f53aed3-9734-4492-bd91-ba42c17ae773\") " pod="service-telemetry/smart-gateway-operator-bbbc889bc-wnp6h" Jan 25 00:19:53 crc kubenswrapper[4985]: I0125 00:19:53.196067 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dd5fg\" (UniqueName: \"kubernetes.io/projected/1f53aed3-9734-4492-bd91-ba42c17ae773-kube-api-access-dd5fg\") pod \"smart-gateway-operator-bbbc889bc-wnp6h\" (UID: \"1f53aed3-9734-4492-bd91-ba42c17ae773\") " pod="service-telemetry/smart-gateway-operator-bbbc889bc-wnp6h" Jan 25 00:19:53 crc kubenswrapper[4985]: I0125 00:19:53.196508 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/1f53aed3-9734-4492-bd91-ba42c17ae773-runner\") pod \"smart-gateway-operator-bbbc889bc-wnp6h\" (UID: \"1f53aed3-9734-4492-bd91-ba42c17ae773\") " pod="service-telemetry/smart-gateway-operator-bbbc889bc-wnp6h" Jan 25 00:19:53 crc kubenswrapper[4985]: I0125 00:19:53.225413 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dd5fg\" (UniqueName: \"kubernetes.io/projected/1f53aed3-9734-4492-bd91-ba42c17ae773-kube-api-access-dd5fg\") pod 
\"smart-gateway-operator-bbbc889bc-wnp6h\" (UID: \"1f53aed3-9734-4492-bd91-ba42c17ae773\") " pod="service-telemetry/smart-gateway-operator-bbbc889bc-wnp6h" Jan 25 00:19:53 crc kubenswrapper[4985]: I0125 00:19:53.354406 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/smart-gateway-operator-bbbc889bc-wnp6h" Jan 25 00:19:53 crc kubenswrapper[4985]: I0125 00:19:53.707482 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/smart-gateway-operator-bbbc889bc-wnp6h"] Jan 25 00:19:54 crc kubenswrapper[4985]: I0125 00:19:54.596342 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bbbc889bc-wnp6h" event={"ID":"1f53aed3-9734-4492-bd91-ba42c17ae773","Type":"ContainerStarted","Data":"cee1d8f573689c510def67fc88e0e6cb6ead528dfa63f404e2e43f6007f022a2"} Jan 25 00:19:59 crc kubenswrapper[4985]: I0125 00:19:59.631990 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-855d9ccff4-z9zrf" event={"ID":"1e15f504-62d6-4982-9729-34f3bbd5f784","Type":"ContainerStarted","Data":"90f8f8f675cba7b7a2de2ef5a94b922125f764155deca7e32e1d32a9e0cc1a47"} Jan 25 00:19:59 crc kubenswrapper[4985]: I0125 00:19:59.634433 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-f4fb5df64-5kq2z" event={"ID":"a5f5a37d-d901-4929-90c0-91d99f3cd65b","Type":"ContainerStarted","Data":"57fb286364786b8bbb3339898d935c539ce7e2e7b1559bcf5d99e581177a8fe8"} Jan 25 00:19:59 crc kubenswrapper[4985]: I0125 00:19:59.634554 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-f4fb5df64-5kq2z" Jan 25 00:19:59 crc kubenswrapper[4985]: I0125 00:19:59.650436 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-855d9ccff4-z9zrf" podStartSLOduration=1.823646315 podStartE2EDuration="9.650419746s" podCreationTimestamp="2026-01-25 00:19:50 +0000 UTC" firstStartedPulling="2026-01-25 00:19:51.239648548 +0000 UTC m=+801.271584821" lastFinishedPulling="2026-01-25 00:19:59.066421979 +0000 UTC m=+809.098358252" observedRunningTime="2026-01-25 00:19:59.644510258 +0000 UTC m=+809.676446531" watchObservedRunningTime="2026-01-25 00:19:59.650419746 +0000 UTC m=+809.682356019" Jan 25 00:19:59 crc kubenswrapper[4985]: I0125 00:19:59.673610 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-f4fb5df64-5kq2z" podStartSLOduration=1.5330712960000001 podStartE2EDuration="10.673589613s" podCreationTimestamp="2026-01-25 00:19:49 +0000 UTC" firstStartedPulling="2026-01-25 00:19:49.905229902 +0000 UTC m=+799.937166175" lastFinishedPulling="2026-01-25 00:19:59.045748209 +0000 UTC m=+809.077684492" observedRunningTime="2026-01-25 00:19:59.659762785 +0000 UTC m=+809.691699058" watchObservedRunningTime="2026-01-25 00:19:59.673589613 +0000 UTC m=+809.705525896" Jan 25 00:20:01 crc kubenswrapper[4985]: I0125 00:20:01.676489 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-86cb77c54b-mq7gd"] Jan 25 00:20:01 crc kubenswrapper[4985]: I0125 00:20:01.677451 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-mq7gd" Jan 25 00:20:01 crc kubenswrapper[4985]: I0125 00:20:01.679648 4985 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-dfx8l" Jan 25 00:20:01 crc kubenswrapper[4985]: I0125 00:20:01.688199 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-mq7gd"] Jan 25 00:20:01 crc kubenswrapper[4985]: I0125 00:20:01.819276 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7k5wj\" (UniqueName: \"kubernetes.io/projected/e75df2f5-fd78-42d9-8bb1-65e73697c5bc-kube-api-access-7k5wj\") pod \"cert-manager-86cb77c54b-mq7gd\" (UID: \"e75df2f5-fd78-42d9-8bb1-65e73697c5bc\") " pod="cert-manager/cert-manager-86cb77c54b-mq7gd" Jan 25 00:20:01 crc kubenswrapper[4985]: I0125 00:20:01.819400 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e75df2f5-fd78-42d9-8bb1-65e73697c5bc-bound-sa-token\") pod \"cert-manager-86cb77c54b-mq7gd\" (UID: \"e75df2f5-fd78-42d9-8bb1-65e73697c5bc\") " pod="cert-manager/cert-manager-86cb77c54b-mq7gd" Jan 25 00:20:01 crc kubenswrapper[4985]: I0125 00:20:01.920301 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e75df2f5-fd78-42d9-8bb1-65e73697c5bc-bound-sa-token\") pod \"cert-manager-86cb77c54b-mq7gd\" (UID: \"e75df2f5-fd78-42d9-8bb1-65e73697c5bc\") " pod="cert-manager/cert-manager-86cb77c54b-mq7gd" Jan 25 00:20:01 crc kubenswrapper[4985]: I0125 00:20:01.920375 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7k5wj\" (UniqueName: \"kubernetes.io/projected/e75df2f5-fd78-42d9-8bb1-65e73697c5bc-kube-api-access-7k5wj\") pod \"cert-manager-86cb77c54b-mq7gd\" (UID: \"e75df2f5-fd78-42d9-8bb1-65e73697c5bc\") " pod="cert-manager/cert-manager-86cb77c54b-mq7gd" Jan 25 00:20:01 crc kubenswrapper[4985]: I0125 00:20:01.938132 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7k5wj\" (UniqueName: \"kubernetes.io/projected/e75df2f5-fd78-42d9-8bb1-65e73697c5bc-kube-api-access-7k5wj\") pod \"cert-manager-86cb77c54b-mq7gd\" (UID: \"e75df2f5-fd78-42d9-8bb1-65e73697c5bc\") " pod="cert-manager/cert-manager-86cb77c54b-mq7gd" Jan 25 00:20:02 crc kubenswrapper[4985]: I0125 00:20:02.108838 4985 prober.go:107] "Probe failed" probeType="Readiness" pod="service-telemetry/elasticsearch-es-default-0" podUID="ceb530c6-05c0-4e6f-a0cb-100077e6777e" containerName="elasticsearch" probeResult="failure" output=< Jan 25 00:20:02 crc kubenswrapper[4985]: {"timestamp": "2026-01-25T00:20:02+00:00", "message": "readiness probe failed", "curl_rc": "7"} Jan 25 00:20:02 crc kubenswrapper[4985]: > Jan 25 00:20:02 crc kubenswrapper[4985]: I0125 00:20:02.728871 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e75df2f5-fd78-42d9-8bb1-65e73697c5bc-bound-sa-token\") pod \"cert-manager-86cb77c54b-mq7gd\" (UID: \"e75df2f5-fd78-42d9-8bb1-65e73697c5bc\") " pod="cert-manager/cert-manager-86cb77c54b-mq7gd" Jan 25 00:20:02 crc kubenswrapper[4985]: I0125 00:20:02.893277 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-86cb77c54b-mq7gd" Jan 25 00:20:03 crc kubenswrapper[4985]: I0125 00:20:03.430678 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-86cb77c54b-mq7gd"] Jan 25 00:20:03 crc kubenswrapper[4985]: W0125 00:20:03.449586 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode75df2f5_fd78_42d9_8bb1_65e73697c5bc.slice/crio-ddff20351f92e1c99dbf38f57b0b4bc1ae3214793594ca149b424d294932407a WatchSource:0}: Error finding container ddff20351f92e1c99dbf38f57b0b4bc1ae3214793594ca149b424d294932407a: Status 404 returned error can't find the container with id ddff20351f92e1c99dbf38f57b0b4bc1ae3214793594ca149b424d294932407a Jan 25 00:20:03 crc kubenswrapper[4985]: I0125 00:20:03.670682 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-mq7gd" event={"ID":"e75df2f5-fd78-42d9-8bb1-65e73697c5bc","Type":"ContainerStarted","Data":"a9c7cff0f692eed3f3ffddbb6048c60b50312b6e99d88ae12757fc54618780cf"} Jan 25 00:20:03 crc kubenswrapper[4985]: I0125 00:20:03.670738 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-86cb77c54b-mq7gd" event={"ID":"e75df2f5-fd78-42d9-8bb1-65e73697c5bc","Type":"ContainerStarted","Data":"ddff20351f92e1c99dbf38f57b0b4bc1ae3214793594ca149b424d294932407a"} Jan 25 00:20:03 crc kubenswrapper[4985]: I0125 00:20:03.689783 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-86cb77c54b-mq7gd" podStartSLOduration=2.689755377 podStartE2EDuration="2.689755377s" podCreationTimestamp="2026-01-25 00:20:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:20:03.68499089 +0000 UTC m=+813.716927183" watchObservedRunningTime="2026-01-25 00:20:03.689755377 +0000 UTC m=+813.721691650" Jan 25 00:20:04 crc kubenswrapper[4985]: I0125 00:20:04.513984 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-f4fb5df64-5kq2z" Jan 25 00:20:05 crc kubenswrapper[4985]: I0125 00:20:05.836492 4985 patch_prober.go:28] interesting pod/machine-config-daemon-dddxc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 25 00:20:05 crc kubenswrapper[4985]: I0125 00:20:05.836549 4985 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 25 00:20:07 crc kubenswrapper[4985]: I0125 00:20:07.248902 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/elasticsearch-es-default-0" Jan 25 00:20:21 crc kubenswrapper[4985]: E0125 00:20:21.257149 4985 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/infrawatch/smart-gateway-operator:latest" Jan 25 00:20:21 crc kubenswrapper[4985]: E0125 00:20:21.257699 4985 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:operator,Image:quay.io/infrawatch/smart-gateway-operator:latest,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:WATCH_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.annotations['olm.targetNamespaces'],},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:POD_NAME,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.name,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:OPERATOR_NAME,Value:smart-gateway-operator,ValueFrom:nil,},EnvVar{Name:ANSIBLE_GATHERING,Value:explicit,ValueFrom:nil,},EnvVar{Name:ANSIBLE_VERBOSITY_SMARTGATEWAY_SMARTGATEWAY_INFRA_WATCH,Value:4,ValueFrom:nil,},EnvVar{Name:ANSIBLE_DEBUG_LOGS,Value:true,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_CORE_SMARTGATEWAY_IMAGE,Value:quay.io/infrawatch/sg-core:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_BRIDGE_SMARTGATEWAY_IMAGE,Value:quay.io/infrawatch/sg-bridge:latest,ValueFrom:nil,},EnvVar{Name:RELATED_IMAGE_OAUTH_PROXY_IMAGE,Value:quay.io/openshift/origin-oauth-proxy:latest,ValueFrom:nil,},EnvVar{Name:OPERATOR_CONDITION_NAME,Value:smart-gateway-operator.v5.0.1768085178,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:runner,ReadOnly:false,MountPath:/tmp/ansible-operator/runner,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dd5fg,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000670000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod smart-gateway-operator-bbbc889bc-wnp6h_service-telemetry(1f53aed3-9734-4492-bd91-ba42c17ae773): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Jan 25 00:20:21 crc kubenswrapper[4985]: E0125 00:20:21.258882 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="service-telemetry/smart-gateway-operator-bbbc889bc-wnp6h" podUID="1f53aed3-9734-4492-bd91-ba42c17ae773" Jan 25 00:20:21 crc kubenswrapper[4985]: E0125 00:20:21.822809 4985 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/infrawatch/smart-gateway-operator:latest\\\"\"" pod="service-telemetry/smart-gateway-operator-bbbc889bc-wnp6h" podUID="1f53aed3-9734-4492-bd91-ba42c17ae773" Jan 25 00:20:35 crc kubenswrapper[4985]: I0125 00:20:35.836274 4985 patch_prober.go:28] interesting pod/machine-config-daemon-dddxc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure 
output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 25 00:20:35 crc kubenswrapper[4985]: I0125 00:20:35.836884 4985 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 25 00:20:35 crc kubenswrapper[4985]: I0125 00:20:35.836936 4985 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" Jan 25 00:20:35 crc kubenswrapper[4985]: I0125 00:20:35.837642 4985 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9754fcac108cedae18ecde93349a9806fb16716055497b233b48c38927bdac01"} pod="openshift-machine-config-operator/machine-config-daemon-dddxc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 25 00:20:35 crc kubenswrapper[4985]: I0125 00:20:35.837700 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" containerID="cri-o://9754fcac108cedae18ecde93349a9806fb16716055497b233b48c38927bdac01" gracePeriod=600 Jan 25 00:20:36 crc kubenswrapper[4985]: I0125 00:20:36.945967 4985 generic.go:334] "Generic (PLEG): container finished" podID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerID="9754fcac108cedae18ecde93349a9806fb16716055497b233b48c38927bdac01" exitCode=0 Jan 25 00:20:36 crc kubenswrapper[4985]: I0125 00:20:36.946225 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" event={"ID":"5fa83abe-5c61-40a5-bf77-d8f929bdda78","Type":"ContainerDied","Data":"9754fcac108cedae18ecde93349a9806fb16716055497b233b48c38927bdac01"} Jan 25 00:20:36 crc kubenswrapper[4985]: I0125 00:20:36.946258 4985 scope.go:117] "RemoveContainer" containerID="6720bc1f53fe4d6d06d4cc9c6ab134d539fb2a6884da52dcd36cde67e4f5afdd" Jan 25 00:20:37 crc kubenswrapper[4985]: I0125 00:20:37.958362 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/smart-gateway-operator-bbbc889bc-wnp6h" event={"ID":"1f53aed3-9734-4492-bd91-ba42c17ae773","Type":"ContainerStarted","Data":"01c9468910ce55a0e09f4659379243985eea756ea5f01f80edd4fb8e376c4314"} Jan 25 00:20:37 crc kubenswrapper[4985]: I0125 00:20:37.962520 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" event={"ID":"5fa83abe-5c61-40a5-bf77-d8f929bdda78","Type":"ContainerStarted","Data":"62dc661fb816fd2fd6357957822aa99e2fb982982064c42d0d36bc76cfe09d3d"} Jan 25 00:20:37 crc kubenswrapper[4985]: I0125 00:20:37.988941 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/smart-gateway-operator-bbbc889bc-wnp6h" podStartSLOduration=1.818998626 podStartE2EDuration="44.988910434s" podCreationTimestamp="2026-01-25 00:19:53 +0000 UTC" firstStartedPulling="2026-01-25 00:19:53.728931988 +0000 UTC m=+803.760868261" lastFinishedPulling="2026-01-25 00:20:36.898843796 +0000 UTC m=+846.930780069" observedRunningTime="2026-01-25 00:20:37.980943751 +0000 UTC m=+848.012880114" 
watchObservedRunningTime="2026-01-25 00:20:37.988910434 +0000 UTC m=+848.020846747" Jan 25 00:21:05 crc kubenswrapper[4985]: I0125 00:21:05.907466 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/awatch-operators-service-telemetry-operator-bundle-nightly-head"] Jan 25 00:21:05 crc kubenswrapper[4985]: I0125 00:21:05.909128 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/awatch-operators-service-telemetry-operator-bundle-nightly-head" Jan 25 00:21:05 crc kubenswrapper[4985]: I0125 00:21:05.912655 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"service-telemetry-operator-catalog-configmap-partition-1" Jan 25 00:21:05 crc kubenswrapper[4985]: I0125 00:21:05.925488 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/awatch-operators-service-telemetry-operator-bundle-nightly-head"] Jan 25 00:21:05 crc kubenswrapper[4985]: I0125 00:21:05.964436 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-telemetry-operator-catalog-configmap-partition-1-unzip\" (UniqueName: \"kubernetes.io/empty-dir/7a7dc13d-b8de-41ca-b4f2-1be26a4d0f92-service-telemetry-operator-catalog-configmap-partition-1-unzip\") pod \"awatch-operators-service-telemetry-operator-bundle-nightly-head\" (UID: \"7a7dc13d-b8de-41ca-b4f2-1be26a4d0f92\") " pod="service-telemetry/awatch-operators-service-telemetry-operator-bundle-nightly-head" Jan 25 00:21:05 crc kubenswrapper[4985]: I0125 00:21:05.964559 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-telemetry-operator-catalog-configmap-partition-1-volume\" (UniqueName: \"kubernetes.io/configmap/7a7dc13d-b8de-41ca-b4f2-1be26a4d0f92-service-telemetry-operator-catalog-configmap-partition-1-volume\") pod \"awatch-operators-service-telemetry-operator-bundle-nightly-head\" (UID: \"7a7dc13d-b8de-41ca-b4f2-1be26a4d0f92\") " pod="service-telemetry/awatch-operators-service-telemetry-operator-bundle-nightly-head" Jan 25 00:21:05 crc kubenswrapper[4985]: I0125 00:21:05.964621 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7mg74\" (UniqueName: \"kubernetes.io/projected/7a7dc13d-b8de-41ca-b4f2-1be26a4d0f92-kube-api-access-7mg74\") pod \"awatch-operators-service-telemetry-operator-bundle-nightly-head\" (UID: \"7a7dc13d-b8de-41ca-b4f2-1be26a4d0f92\") " pod="service-telemetry/awatch-operators-service-telemetry-operator-bundle-nightly-head" Jan 25 00:21:06 crc kubenswrapper[4985]: I0125 00:21:06.066037 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-telemetry-operator-catalog-configmap-partition-1-unzip\" (UniqueName: \"kubernetes.io/empty-dir/7a7dc13d-b8de-41ca-b4f2-1be26a4d0f92-service-telemetry-operator-catalog-configmap-partition-1-unzip\") pod \"awatch-operators-service-telemetry-operator-bundle-nightly-head\" (UID: \"7a7dc13d-b8de-41ca-b4f2-1be26a4d0f92\") " pod="service-telemetry/awatch-operators-service-telemetry-operator-bundle-nightly-head" Jan 25 00:21:06 crc kubenswrapper[4985]: I0125 00:21:06.066217 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-telemetry-operator-catalog-configmap-partition-1-volume\" (UniqueName: \"kubernetes.io/configmap/7a7dc13d-b8de-41ca-b4f2-1be26a4d0f92-service-telemetry-operator-catalog-configmap-partition-1-volume\") pod 
\"awatch-operators-service-telemetry-operator-bundle-nightly-head\" (UID: \"7a7dc13d-b8de-41ca-b4f2-1be26a4d0f92\") " pod="service-telemetry/awatch-operators-service-telemetry-operator-bundle-nightly-head" Jan 25 00:21:06 crc kubenswrapper[4985]: I0125 00:21:06.066306 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7mg74\" (UniqueName: \"kubernetes.io/projected/7a7dc13d-b8de-41ca-b4f2-1be26a4d0f92-kube-api-access-7mg74\") pod \"awatch-operators-service-telemetry-operator-bundle-nightly-head\" (UID: \"7a7dc13d-b8de-41ca-b4f2-1be26a4d0f92\") " pod="service-telemetry/awatch-operators-service-telemetry-operator-bundle-nightly-head" Jan 25 00:21:06 crc kubenswrapper[4985]: I0125 00:21:06.066635 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-telemetry-operator-catalog-configmap-partition-1-unzip\" (UniqueName: \"kubernetes.io/empty-dir/7a7dc13d-b8de-41ca-b4f2-1be26a4d0f92-service-telemetry-operator-catalog-configmap-partition-1-unzip\") pod \"awatch-operators-service-telemetry-operator-bundle-nightly-head\" (UID: \"7a7dc13d-b8de-41ca-b4f2-1be26a4d0f92\") " pod="service-telemetry/awatch-operators-service-telemetry-operator-bundle-nightly-head" Jan 25 00:21:06 crc kubenswrapper[4985]: I0125 00:21:06.067189 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-telemetry-operator-catalog-configmap-partition-1-volume\" (UniqueName: \"kubernetes.io/configmap/7a7dc13d-b8de-41ca-b4f2-1be26a4d0f92-service-telemetry-operator-catalog-configmap-partition-1-volume\") pod \"awatch-operators-service-telemetry-operator-bundle-nightly-head\" (UID: \"7a7dc13d-b8de-41ca-b4f2-1be26a4d0f92\") " pod="service-telemetry/awatch-operators-service-telemetry-operator-bundle-nightly-head" Jan 25 00:21:06 crc kubenswrapper[4985]: I0125 00:21:06.101072 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7mg74\" (UniqueName: \"kubernetes.io/projected/7a7dc13d-b8de-41ca-b4f2-1be26a4d0f92-kube-api-access-7mg74\") pod \"awatch-operators-service-telemetry-operator-bundle-nightly-head\" (UID: \"7a7dc13d-b8de-41ca-b4f2-1be26a4d0f92\") " pod="service-telemetry/awatch-operators-service-telemetry-operator-bundle-nightly-head" Jan 25 00:21:06 crc kubenswrapper[4985]: I0125 00:21:06.277558 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/awatch-operators-service-telemetry-operator-bundle-nightly-head" Jan 25 00:21:06 crc kubenswrapper[4985]: I0125 00:21:06.477680 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/awatch-operators-service-telemetry-operator-bundle-nightly-head"] Jan 25 00:21:06 crc kubenswrapper[4985]: W0125 00:21:06.488417 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7a7dc13d_b8de_41ca_b4f2_1be26a4d0f92.slice/crio-e4ccf4b0e7e256b2a9396daa5e453fe393c876d6e848c1b744420ebc599f7ec8 WatchSource:0}: Error finding container e4ccf4b0e7e256b2a9396daa5e453fe393c876d6e848c1b744420ebc599f7ec8: Status 404 returned error can't find the container with id e4ccf4b0e7e256b2a9396daa5e453fe393c876d6e848c1b744420ebc599f7ec8 Jan 25 00:21:07 crc kubenswrapper[4985]: I0125 00:21:07.165693 4985 generic.go:334] "Generic (PLEG): container finished" podID="7a7dc13d-b8de-41ca-b4f2-1be26a4d0f92" containerID="a5aed20cda79b0343caff5beaed99c66973e95d6b9fc81774ec72fa95d84ec60" exitCode=0 Jan 25 00:21:07 crc kubenswrapper[4985]: I0125 00:21:07.165752 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/awatch-operators-service-telemetry-operator-bundle-nightly-head" event={"ID":"7a7dc13d-b8de-41ca-b4f2-1be26a4d0f92","Type":"ContainerDied","Data":"a5aed20cda79b0343caff5beaed99c66973e95d6b9fc81774ec72fa95d84ec60"} Jan 25 00:21:07 crc kubenswrapper[4985]: I0125 00:21:07.165924 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/awatch-operators-service-telemetry-operator-bundle-nightly-head" event={"ID":"7a7dc13d-b8de-41ca-b4f2-1be26a4d0f92","Type":"ContainerStarted","Data":"e4ccf4b0e7e256b2a9396daa5e453fe393c876d6e848c1b744420ebc599f7ec8"} Jan 25 00:21:08 crc kubenswrapper[4985]: I0125 00:21:08.177881 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/awatch-operators-service-telemetry-operator-bundle-nightly-head" event={"ID":"7a7dc13d-b8de-41ca-b4f2-1be26a4d0f92","Type":"ContainerStarted","Data":"08a4634d2fe54a8b21b84025b4e5d897f10bc5d44442b85091620a6365b12a73"} Jan 25 00:21:08 crc kubenswrapper[4985]: I0125 00:21:08.196851 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/awatch-operators-service-telemetry-operator-bundle-nightly-head" podStartSLOduration=2.634428481 podStartE2EDuration="3.196834272s" podCreationTimestamp="2026-01-25 00:21:05 +0000 UTC" firstStartedPulling="2026-01-25 00:21:07.167470641 +0000 UTC m=+877.199406924" lastFinishedPulling="2026-01-25 00:21:07.729876402 +0000 UTC m=+877.761812715" observedRunningTime="2026-01-25 00:21:08.195074465 +0000 UTC m=+878.227010808" watchObservedRunningTime="2026-01-25 00:21:08.196834272 +0000 UTC m=+878.228770545" Jan 25 00:21:09 crc kubenswrapper[4985]: I0125 00:21:09.110934 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5"] Jan 25 00:21:09 crc kubenswrapper[4985]: I0125 00:21:09.112154 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5" Jan 25 00:21:09 crc kubenswrapper[4985]: I0125 00:21:09.115380 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Jan 25 00:21:09 crc kubenswrapper[4985]: I0125 00:21:09.123126 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5"] Jan 25 00:21:09 crc kubenswrapper[4985]: I0125 00:21:09.212388 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncwnk\" (UniqueName: \"kubernetes.io/projected/2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc-kube-api-access-ncwnk\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5\" (UID: \"2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5" Jan 25 00:21:09 crc kubenswrapper[4985]: I0125 00:21:09.212494 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5\" (UID: \"2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5" Jan 25 00:21:09 crc kubenswrapper[4985]: I0125 00:21:09.212722 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5\" (UID: \"2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5" Jan 25 00:21:09 crc kubenswrapper[4985]: I0125 00:21:09.314468 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5\" (UID: \"2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5" Jan 25 00:21:09 crc kubenswrapper[4985]: I0125 00:21:09.314547 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5\" (UID: \"2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5" Jan 25 00:21:09 crc kubenswrapper[4985]: I0125 00:21:09.314659 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncwnk\" (UniqueName: \"kubernetes.io/projected/2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc-kube-api-access-ncwnk\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5\" (UID: \"2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5" Jan 25 00:21:09 crc kubenswrapper[4985]: I0125 00:21:09.315192 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc-bundle\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5\" (UID: \"2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5" Jan 25 00:21:09 crc kubenswrapper[4985]: I0125 00:21:09.315473 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc-util\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5\" (UID: \"2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5" Jan 25 00:21:09 crc kubenswrapper[4985]: I0125 00:21:09.332701 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncwnk\" (UniqueName: \"kubernetes.io/projected/2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc-kube-api-access-ncwnk\") pod \"6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5\" (UID: \"2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc\") " pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5" Jan 25 00:21:09 crc kubenswrapper[4985]: I0125 00:21:09.428220 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5" Jan 25 00:21:09 crc kubenswrapper[4985]: W0125 00:21:09.880249 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2acbb776_e15d_49e2_b2e0_4e1e1ad82ddc.slice/crio-6caeafa5ee2f31124d24eb58ae677bb6bc160680724fa3f91d32fe0e5b92e91a WatchSource:0}: Error finding container 6caeafa5ee2f31124d24eb58ae677bb6bc160680724fa3f91d32fe0e5b92e91a: Status 404 returned error can't find the container with id 6caeafa5ee2f31124d24eb58ae677bb6bc160680724fa3f91d32fe0e5b92e91a Jan 25 00:21:09 crc kubenswrapper[4985]: I0125 00:21:09.885400 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5"] Jan 25 00:21:09 crc kubenswrapper[4985]: I0125 00:21:09.929181 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb"] Jan 25 00:21:09 crc kubenswrapper[4985]: I0125 00:21:09.930785 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb" Jan 25 00:21:09 crc kubenswrapper[4985]: I0125 00:21:09.947007 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb"] Jan 25 00:21:10 crc kubenswrapper[4985]: I0125 00:21:10.023667 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lg5c2\" (UniqueName: \"kubernetes.io/projected/08fda121-6ca2-47d1-bb33-6fc26c53f5d1-kube-api-access-lg5c2\") pod \"59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb\" (UID: \"08fda121-6ca2-47d1-bb33-6fc26c53f5d1\") " pod="service-telemetry/59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb" Jan 25 00:21:10 crc kubenswrapper[4985]: I0125 00:21:10.023709 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/08fda121-6ca2-47d1-bb33-6fc26c53f5d1-util\") pod \"59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb\" (UID: \"08fda121-6ca2-47d1-bb33-6fc26c53f5d1\") " pod="service-telemetry/59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb" Jan 25 00:21:10 crc kubenswrapper[4985]: I0125 00:21:10.023759 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/08fda121-6ca2-47d1-bb33-6fc26c53f5d1-bundle\") pod \"59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb\" (UID: \"08fda121-6ca2-47d1-bb33-6fc26c53f5d1\") " pod="service-telemetry/59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb" Jan 25 00:21:10 crc kubenswrapper[4985]: I0125 00:21:10.124871 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lg5c2\" (UniqueName: \"kubernetes.io/projected/08fda121-6ca2-47d1-bb33-6fc26c53f5d1-kube-api-access-lg5c2\") pod \"59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb\" (UID: \"08fda121-6ca2-47d1-bb33-6fc26c53f5d1\") " pod="service-telemetry/59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb" Jan 25 00:21:10 crc kubenswrapper[4985]: I0125 00:21:10.124926 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/08fda121-6ca2-47d1-bb33-6fc26c53f5d1-util\") pod \"59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb\" (UID: \"08fda121-6ca2-47d1-bb33-6fc26c53f5d1\") " pod="service-telemetry/59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb" Jan 25 00:21:10 crc kubenswrapper[4985]: I0125 00:21:10.124981 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/08fda121-6ca2-47d1-bb33-6fc26c53f5d1-bundle\") pod \"59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb\" (UID: \"08fda121-6ca2-47d1-bb33-6fc26c53f5d1\") " pod="service-telemetry/59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb" Jan 25 00:21:10 crc kubenswrapper[4985]: I0125 00:21:10.125438 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/08fda121-6ca2-47d1-bb33-6fc26c53f5d1-bundle\") pod \"59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb\" (UID: \"08fda121-6ca2-47d1-bb33-6fc26c53f5d1\") " 
pod="service-telemetry/59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb" Jan 25 00:21:10 crc kubenswrapper[4985]: I0125 00:21:10.125573 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/08fda121-6ca2-47d1-bb33-6fc26c53f5d1-util\") pod \"59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb\" (UID: \"08fda121-6ca2-47d1-bb33-6fc26c53f5d1\") " pod="service-telemetry/59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb" Jan 25 00:21:10 crc kubenswrapper[4985]: I0125 00:21:10.142439 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lg5c2\" (UniqueName: \"kubernetes.io/projected/08fda121-6ca2-47d1-bb33-6fc26c53f5d1-kube-api-access-lg5c2\") pod \"59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb\" (UID: \"08fda121-6ca2-47d1-bb33-6fc26c53f5d1\") " pod="service-telemetry/59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb" Jan 25 00:21:10 crc kubenswrapper[4985]: I0125 00:21:10.190567 4985 generic.go:334] "Generic (PLEG): container finished" podID="2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc" containerID="c283c63bb9c9170e211c5334eb7fc66ed1f64a36571b634133a0bac149162f6a" exitCode=0 Jan 25 00:21:10 crc kubenswrapper[4985]: I0125 00:21:10.190608 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5" event={"ID":"2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc","Type":"ContainerDied","Data":"c283c63bb9c9170e211c5334eb7fc66ed1f64a36571b634133a0bac149162f6a"} Jan 25 00:21:10 crc kubenswrapper[4985]: I0125 00:21:10.190636 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5" event={"ID":"2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc","Type":"ContainerStarted","Data":"6caeafa5ee2f31124d24eb58ae677bb6bc160680724fa3f91d32fe0e5b92e91a"} Jan 25 00:21:10 crc kubenswrapper[4985]: I0125 00:21:10.315225 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb" Jan 25 00:21:10 crc kubenswrapper[4985]: I0125 00:21:10.524125 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb"] Jan 25 00:21:11 crc kubenswrapper[4985]: I0125 00:21:11.197654 4985 generic.go:334] "Generic (PLEG): container finished" podID="08fda121-6ca2-47d1-bb33-6fc26c53f5d1" containerID="069e879dd8d514386bcd9751e4ea0c5d67830ca824ce6b6ce7125c227e47afa7" exitCode=0 Jan 25 00:21:11 crc kubenswrapper[4985]: I0125 00:21:11.197793 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb" event={"ID":"08fda121-6ca2-47d1-bb33-6fc26c53f5d1","Type":"ContainerDied","Data":"069e879dd8d514386bcd9751e4ea0c5d67830ca824ce6b6ce7125c227e47afa7"} Jan 25 00:21:11 crc kubenswrapper[4985]: I0125 00:21:11.197896 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb" event={"ID":"08fda121-6ca2-47d1-bb33-6fc26c53f5d1","Type":"ContainerStarted","Data":"c9f0e140dbc47dbbeb11bbdaf66fc87253f7f59df07da40f8d88df0c634cbf50"} Jan 25 00:21:12 crc kubenswrapper[4985]: I0125 00:21:12.210415 4985 generic.go:334] "Generic (PLEG): container finished" podID="2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc" containerID="02982d9082272765af0e49ec76f40027e59ca9c118c7eb193aed09074a64f205" exitCode=0 Jan 25 00:21:12 crc kubenswrapper[4985]: I0125 00:21:12.210461 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5" event={"ID":"2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc","Type":"ContainerDied","Data":"02982d9082272765af0e49ec76f40027e59ca9c118c7eb193aed09074a64f205"} Jan 25 00:21:12 crc kubenswrapper[4985]: I0125 00:21:12.869214 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-j5xvf"] Jan 25 00:21:12 crc kubenswrapper[4985]: I0125 00:21:12.871863 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-j5xvf" Jan 25 00:21:12 crc kubenswrapper[4985]: I0125 00:21:12.884406 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-j5xvf"] Jan 25 00:21:12 crc kubenswrapper[4985]: I0125 00:21:12.965720 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5krz9\" (UniqueName: \"kubernetes.io/projected/d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91-kube-api-access-5krz9\") pod \"redhat-operators-j5xvf\" (UID: \"d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91\") " pod="openshift-marketplace/redhat-operators-j5xvf" Jan 25 00:21:12 crc kubenswrapper[4985]: I0125 00:21:12.966479 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91-utilities\") pod \"redhat-operators-j5xvf\" (UID: \"d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91\") " pod="openshift-marketplace/redhat-operators-j5xvf" Jan 25 00:21:12 crc kubenswrapper[4985]: I0125 00:21:12.966773 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91-catalog-content\") pod \"redhat-operators-j5xvf\" (UID: \"d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91\") " pod="openshift-marketplace/redhat-operators-j5xvf" Jan 25 00:21:13 crc kubenswrapper[4985]: I0125 00:21:13.068094 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5krz9\" (UniqueName: \"kubernetes.io/projected/d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91-kube-api-access-5krz9\") pod \"redhat-operators-j5xvf\" (UID: \"d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91\") " pod="openshift-marketplace/redhat-operators-j5xvf" Jan 25 00:21:13 crc kubenswrapper[4985]: I0125 00:21:13.068445 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91-utilities\") pod \"redhat-operators-j5xvf\" (UID: \"d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91\") " pod="openshift-marketplace/redhat-operators-j5xvf" Jan 25 00:21:13 crc kubenswrapper[4985]: I0125 00:21:13.068585 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91-catalog-content\") pod \"redhat-operators-j5xvf\" (UID: \"d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91\") " pod="openshift-marketplace/redhat-operators-j5xvf" Jan 25 00:21:13 crc kubenswrapper[4985]: I0125 00:21:13.068943 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91-utilities\") pod \"redhat-operators-j5xvf\" (UID: \"d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91\") " pod="openshift-marketplace/redhat-operators-j5xvf" Jan 25 00:21:13 crc kubenswrapper[4985]: I0125 00:21:13.069034 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91-catalog-content\") pod \"redhat-operators-j5xvf\" (UID: \"d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91\") " pod="openshift-marketplace/redhat-operators-j5xvf" Jan 25 00:21:13 crc kubenswrapper[4985]: I0125 00:21:13.093421 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-5krz9\" (UniqueName: \"kubernetes.io/projected/d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91-kube-api-access-5krz9\") pod \"redhat-operators-j5xvf\" (UID: \"d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91\") " pod="openshift-marketplace/redhat-operators-j5xvf" Jan 25 00:21:13 crc kubenswrapper[4985]: I0125 00:21:13.196956 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-j5xvf" Jan 25 00:21:13 crc kubenswrapper[4985]: I0125 00:21:13.219338 4985 generic.go:334] "Generic (PLEG): container finished" podID="2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc" containerID="d735c8c8406148419d3c7236110e3629d767d024fa0cd2f4a0ad87ac0f614a50" exitCode=0 Jan 25 00:21:13 crc kubenswrapper[4985]: I0125 00:21:13.219405 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5" event={"ID":"2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc","Type":"ContainerDied","Data":"d735c8c8406148419d3c7236110e3629d767d024fa0cd2f4a0ad87ac0f614a50"} Jan 25 00:21:13 crc kubenswrapper[4985]: I0125 00:21:13.223959 4985 generic.go:334] "Generic (PLEG): container finished" podID="08fda121-6ca2-47d1-bb33-6fc26c53f5d1" containerID="da2356878de32584049e18b13e2ac6e4f37481ef423c2d8ab6d328ceffc70c92" exitCode=0 Jan 25 00:21:13 crc kubenswrapper[4985]: I0125 00:21:13.223997 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb" event={"ID":"08fda121-6ca2-47d1-bb33-6fc26c53f5d1","Type":"ContainerDied","Data":"da2356878de32584049e18b13e2ac6e4f37481ef423c2d8ab6d328ceffc70c92"} Jan 25 00:21:13 crc kubenswrapper[4985]: I0125 00:21:13.677123 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-j5xvf"] Jan 25 00:21:14 crc kubenswrapper[4985]: I0125 00:21:14.231337 4985 generic.go:334] "Generic (PLEG): container finished" podID="d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91" containerID="cee80f9f73ba22eae38e6a400c61c30effa599200dca2d1af8630859666f5d56" exitCode=0 Jan 25 00:21:14 crc kubenswrapper[4985]: I0125 00:21:14.231420 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j5xvf" event={"ID":"d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91","Type":"ContainerDied","Data":"cee80f9f73ba22eae38e6a400c61c30effa599200dca2d1af8630859666f5d56"} Jan 25 00:21:14 crc kubenswrapper[4985]: I0125 00:21:14.231463 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j5xvf" event={"ID":"d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91","Type":"ContainerStarted","Data":"95911c680c67c06d88594eb1358c46ce36a293da83d787d8b294e89ede493891"} Jan 25 00:21:14 crc kubenswrapper[4985]: I0125 00:21:14.234775 4985 generic.go:334] "Generic (PLEG): container finished" podID="08fda121-6ca2-47d1-bb33-6fc26c53f5d1" containerID="e0ac9bcef25f6f3221f935b67ce4a978769cc8386977717ca658e4477b627df5" exitCode=0 Jan 25 00:21:14 crc kubenswrapper[4985]: I0125 00:21:14.234856 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb" event={"ID":"08fda121-6ca2-47d1-bb33-6fc26c53f5d1","Type":"ContainerDied","Data":"e0ac9bcef25f6f3221f935b67ce4a978769cc8386977717ca658e4477b627df5"} Jan 25 00:21:14 crc kubenswrapper[4985]: I0125 00:21:14.519529 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5" Jan 25 00:21:14 crc kubenswrapper[4985]: I0125 00:21:14.591415 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ncwnk\" (UniqueName: \"kubernetes.io/projected/2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc-kube-api-access-ncwnk\") pod \"2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc\" (UID: \"2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc\") " Jan 25 00:21:14 crc kubenswrapper[4985]: I0125 00:21:14.591740 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc-util\") pod \"2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc\" (UID: \"2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc\") " Jan 25 00:21:14 crc kubenswrapper[4985]: I0125 00:21:14.591778 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc-bundle\") pod \"2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc\" (UID: \"2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc\") " Jan 25 00:21:14 crc kubenswrapper[4985]: I0125 00:21:14.592644 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc-bundle" (OuterVolumeSpecName: "bundle") pod "2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc" (UID: "2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:21:14 crc kubenswrapper[4985]: I0125 00:21:14.596951 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc-kube-api-access-ncwnk" (OuterVolumeSpecName: "kube-api-access-ncwnk") pod "2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc" (UID: "2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc"). InnerVolumeSpecName "kube-api-access-ncwnk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:21:14 crc kubenswrapper[4985]: I0125 00:21:14.607057 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc-util" (OuterVolumeSpecName: "util") pod "2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc" (UID: "2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:21:14 crc kubenswrapper[4985]: I0125 00:21:14.693596 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ncwnk\" (UniqueName: \"kubernetes.io/projected/2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc-kube-api-access-ncwnk\") on node \"crc\" DevicePath \"\"" Jan 25 00:21:14 crc kubenswrapper[4985]: I0125 00:21:14.693628 4985 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc-util\") on node \"crc\" DevicePath \"\"" Jan 25 00:21:14 crc kubenswrapper[4985]: I0125 00:21:14.693639 4985 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc-bundle\") on node \"crc\" DevicePath \"\"" Jan 25 00:21:15 crc kubenswrapper[4985]: I0125 00:21:15.253758 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5" Jan 25 00:21:15 crc kubenswrapper[4985]: I0125 00:21:15.256450 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5" event={"ID":"2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc","Type":"ContainerDied","Data":"6caeafa5ee2f31124d24eb58ae677bb6bc160680724fa3f91d32fe0e5b92e91a"} Jan 25 00:21:15 crc kubenswrapper[4985]: I0125 00:21:15.256504 4985 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6caeafa5ee2f31124d24eb58ae677bb6bc160680724fa3f91d32fe0e5b92e91a" Jan 25 00:21:15 crc kubenswrapper[4985]: I0125 00:21:15.528669 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb" Jan 25 00:21:15 crc kubenswrapper[4985]: I0125 00:21:15.607779 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/08fda121-6ca2-47d1-bb33-6fc26c53f5d1-bundle\") pod \"08fda121-6ca2-47d1-bb33-6fc26c53f5d1\" (UID: \"08fda121-6ca2-47d1-bb33-6fc26c53f5d1\") " Jan 25 00:21:15 crc kubenswrapper[4985]: I0125 00:21:15.607859 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lg5c2\" (UniqueName: \"kubernetes.io/projected/08fda121-6ca2-47d1-bb33-6fc26c53f5d1-kube-api-access-lg5c2\") pod \"08fda121-6ca2-47d1-bb33-6fc26c53f5d1\" (UID: \"08fda121-6ca2-47d1-bb33-6fc26c53f5d1\") " Jan 25 00:21:15 crc kubenswrapper[4985]: I0125 00:21:15.607912 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/08fda121-6ca2-47d1-bb33-6fc26c53f5d1-util\") pod \"08fda121-6ca2-47d1-bb33-6fc26c53f5d1\" (UID: \"08fda121-6ca2-47d1-bb33-6fc26c53f5d1\") " Jan 25 00:21:15 crc kubenswrapper[4985]: I0125 00:21:15.608864 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08fda121-6ca2-47d1-bb33-6fc26c53f5d1-bundle" (OuterVolumeSpecName: "bundle") pod "08fda121-6ca2-47d1-bb33-6fc26c53f5d1" (UID: "08fda121-6ca2-47d1-bb33-6fc26c53f5d1"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:21:15 crc kubenswrapper[4985]: I0125 00:21:15.612322 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08fda121-6ca2-47d1-bb33-6fc26c53f5d1-kube-api-access-lg5c2" (OuterVolumeSpecName: "kube-api-access-lg5c2") pod "08fda121-6ca2-47d1-bb33-6fc26c53f5d1" (UID: "08fda121-6ca2-47d1-bb33-6fc26c53f5d1"). InnerVolumeSpecName "kube-api-access-lg5c2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:21:15 crc kubenswrapper[4985]: I0125 00:21:15.628415 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08fda121-6ca2-47d1-bb33-6fc26c53f5d1-util" (OuterVolumeSpecName: "util") pod "08fda121-6ca2-47d1-bb33-6fc26c53f5d1" (UID: "08fda121-6ca2-47d1-bb33-6fc26c53f5d1"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:21:15 crc kubenswrapper[4985]: I0125 00:21:15.709364 4985 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/08fda121-6ca2-47d1-bb33-6fc26c53f5d1-util\") on node \"crc\" DevicePath \"\"" Jan 25 00:21:15 crc kubenswrapper[4985]: I0125 00:21:15.709414 4985 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/08fda121-6ca2-47d1-bb33-6fc26c53f5d1-bundle\") on node \"crc\" DevicePath \"\"" Jan 25 00:21:15 crc kubenswrapper[4985]: I0125 00:21:15.709428 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lg5c2\" (UniqueName: \"kubernetes.io/projected/08fda121-6ca2-47d1-bb33-6fc26c53f5d1-kube-api-access-lg5c2\") on node \"crc\" DevicePath \"\"" Jan 25 00:21:16 crc kubenswrapper[4985]: I0125 00:21:16.274516 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb" Jan 25 00:21:16 crc kubenswrapper[4985]: I0125 00:21:16.277273 4985 generic.go:334] "Generic (PLEG): container finished" podID="d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91" containerID="79dca948dc9d489283eda126f9048c1e3cb8be2a566823c399319ab6a33d7549" exitCode=0 Jan 25 00:21:16 crc kubenswrapper[4985]: I0125 00:21:16.288864 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/59d91eeadfbc177692af3c8c1571c9d473bd01e833d0373cf802b3d5728ljtb" event={"ID":"08fda121-6ca2-47d1-bb33-6fc26c53f5d1","Type":"ContainerDied","Data":"c9f0e140dbc47dbbeb11bbdaf66fc87253f7f59df07da40f8d88df0c634cbf50"} Jan 25 00:21:16 crc kubenswrapper[4985]: I0125 00:21:16.288890 4985 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c9f0e140dbc47dbbeb11bbdaf66fc87253f7f59df07da40f8d88df0c634cbf50" Jan 25 00:21:16 crc kubenswrapper[4985]: I0125 00:21:16.288900 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j5xvf" event={"ID":"d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91","Type":"ContainerDied","Data":"79dca948dc9d489283eda126f9048c1e3cb8be2a566823c399319ab6a33d7549"} Jan 25 00:21:17 crc kubenswrapper[4985]: I0125 00:21:17.289292 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j5xvf" event={"ID":"d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91","Type":"ContainerStarted","Data":"d5de0ec1931b5ff9ca008b5ad0e8387b17cdcba67cc8c22adb03ea4e0f27dbc1"} Jan 25 00:21:17 crc kubenswrapper[4985]: I0125 00:21:17.320608 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-j5xvf" podStartSLOduration=2.816984921 podStartE2EDuration="5.320580512s" podCreationTimestamp="2026-01-25 00:21:12 +0000 UTC" firstStartedPulling="2026-01-25 00:21:14.234113704 +0000 UTC m=+884.266049977" lastFinishedPulling="2026-01-25 00:21:16.737709265 +0000 UTC m=+886.769645568" observedRunningTime="2026-01-25 00:21:17.313355189 +0000 UTC m=+887.345291502" watchObservedRunningTime="2026-01-25 00:21:17.320580512 +0000 UTC m=+887.352516825" Jan 25 00:21:23 crc kubenswrapper[4985]: I0125 00:21:23.197968 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-j5xvf" Jan 25 00:21:23 crc kubenswrapper[4985]: I0125 00:21:23.198759 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-j5xvf" Jan 25 
00:21:24 crc kubenswrapper[4985]: I0125 00:21:24.263881 4985 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-j5xvf" podUID="d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91" containerName="registry-server" probeResult="failure" output=< Jan 25 00:21:24 crc kubenswrapper[4985]: timeout: failed to connect service ":50051" within 1s Jan 25 00:21:24 crc kubenswrapper[4985]: > Jan 25 00:21:24 crc kubenswrapper[4985]: I0125 00:21:24.458179 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/service-telemetry-operator-55b89ddfb9-sp9wb"] Jan 25 00:21:24 crc kubenswrapper[4985]: E0125 00:21:24.458846 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08fda121-6ca2-47d1-bb33-6fc26c53f5d1" containerName="extract" Jan 25 00:21:24 crc kubenswrapper[4985]: I0125 00:21:24.459000 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="08fda121-6ca2-47d1-bb33-6fc26c53f5d1" containerName="extract" Jan 25 00:21:24 crc kubenswrapper[4985]: E0125 00:21:24.459200 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc" containerName="pull" Jan 25 00:21:24 crc kubenswrapper[4985]: I0125 00:21:24.459514 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc" containerName="pull" Jan 25 00:21:24 crc kubenswrapper[4985]: E0125 00:21:24.459663 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08fda121-6ca2-47d1-bb33-6fc26c53f5d1" containerName="pull" Jan 25 00:21:24 crc kubenswrapper[4985]: I0125 00:21:24.459795 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="08fda121-6ca2-47d1-bb33-6fc26c53f5d1" containerName="pull" Jan 25 00:21:24 crc kubenswrapper[4985]: E0125 00:21:24.459943 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc" containerName="extract" Jan 25 00:21:24 crc kubenswrapper[4985]: I0125 00:21:24.460073 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc" containerName="extract" Jan 25 00:21:24 crc kubenswrapper[4985]: E0125 00:21:24.460262 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08fda121-6ca2-47d1-bb33-6fc26c53f5d1" containerName="util" Jan 25 00:21:24 crc kubenswrapper[4985]: I0125 00:21:24.460391 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="08fda121-6ca2-47d1-bb33-6fc26c53f5d1" containerName="util" Jan 25 00:21:24 crc kubenswrapper[4985]: E0125 00:21:24.460527 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc" containerName="util" Jan 25 00:21:24 crc kubenswrapper[4985]: I0125 00:21:24.460650 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc" containerName="util" Jan 25 00:21:24 crc kubenswrapper[4985]: I0125 00:21:24.461060 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="08fda121-6ca2-47d1-bb33-6fc26c53f5d1" containerName="extract" Jan 25 00:21:24 crc kubenswrapper[4985]: I0125 00:21:24.461952 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc" containerName="extract" Jan 25 00:21:24 crc kubenswrapper[4985]: I0125 00:21:24.463041 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-55b89ddfb9-sp9wb" Jan 25 00:21:24 crc kubenswrapper[4985]: I0125 00:21:24.470084 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"service-telemetry-operator-dockercfg-xtb5h" Jan 25 00:21:24 crc kubenswrapper[4985]: I0125 00:21:24.475352 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-55b89ddfb9-sp9wb"] Jan 25 00:21:24 crc kubenswrapper[4985]: I0125 00:21:24.534632 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/8db9b718-8110-4d6a-9082-26ebabcf60c6-runner\") pod \"service-telemetry-operator-55b89ddfb9-sp9wb\" (UID: \"8db9b718-8110-4d6a-9082-26ebabcf60c6\") " pod="service-telemetry/service-telemetry-operator-55b89ddfb9-sp9wb" Jan 25 00:21:24 crc kubenswrapper[4985]: I0125 00:21:24.535002 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62jmc\" (UniqueName: \"kubernetes.io/projected/8db9b718-8110-4d6a-9082-26ebabcf60c6-kube-api-access-62jmc\") pod \"service-telemetry-operator-55b89ddfb9-sp9wb\" (UID: \"8db9b718-8110-4d6a-9082-26ebabcf60c6\") " pod="service-telemetry/service-telemetry-operator-55b89ddfb9-sp9wb" Jan 25 00:21:24 crc kubenswrapper[4985]: I0125 00:21:24.636457 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62jmc\" (UniqueName: \"kubernetes.io/projected/8db9b718-8110-4d6a-9082-26ebabcf60c6-kube-api-access-62jmc\") pod \"service-telemetry-operator-55b89ddfb9-sp9wb\" (UID: \"8db9b718-8110-4d6a-9082-26ebabcf60c6\") " pod="service-telemetry/service-telemetry-operator-55b89ddfb9-sp9wb" Jan 25 00:21:24 crc kubenswrapper[4985]: I0125 00:21:24.636529 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/8db9b718-8110-4d6a-9082-26ebabcf60c6-runner\") pod \"service-telemetry-operator-55b89ddfb9-sp9wb\" (UID: \"8db9b718-8110-4d6a-9082-26ebabcf60c6\") " pod="service-telemetry/service-telemetry-operator-55b89ddfb9-sp9wb" Jan 25 00:21:24 crc kubenswrapper[4985]: I0125 00:21:24.637044 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"runner\" (UniqueName: \"kubernetes.io/empty-dir/8db9b718-8110-4d6a-9082-26ebabcf60c6-runner\") pod \"service-telemetry-operator-55b89ddfb9-sp9wb\" (UID: \"8db9b718-8110-4d6a-9082-26ebabcf60c6\") " pod="service-telemetry/service-telemetry-operator-55b89ddfb9-sp9wb" Jan 25 00:21:24 crc kubenswrapper[4985]: I0125 00:21:24.662413 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62jmc\" (UniqueName: \"kubernetes.io/projected/8db9b718-8110-4d6a-9082-26ebabcf60c6-kube-api-access-62jmc\") pod \"service-telemetry-operator-55b89ddfb9-sp9wb\" (UID: \"8db9b718-8110-4d6a-9082-26ebabcf60c6\") " pod="service-telemetry/service-telemetry-operator-55b89ddfb9-sp9wb" Jan 25 00:21:24 crc kubenswrapper[4985]: I0125 00:21:24.793985 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/service-telemetry-operator-55b89ddfb9-sp9wb" Jan 25 00:21:25 crc kubenswrapper[4985]: I0125 00:21:25.033666 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/service-telemetry-operator-55b89ddfb9-sp9wb"] Jan 25 00:21:25 crc kubenswrapper[4985]: I0125 00:21:25.345935 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-55b89ddfb9-sp9wb" event={"ID":"8db9b718-8110-4d6a-9082-26ebabcf60c6","Type":"ContainerStarted","Data":"79003e547c04f99c68a620fac23d7d02045863799ed3bb890f1041298295d874"} Jan 25 00:21:27 crc kubenswrapper[4985]: I0125 00:21:27.565887 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-rl2xx"] Jan 25 00:21:27 crc kubenswrapper[4985]: I0125 00:21:27.567039 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/interconnect-operator-5bb49f789d-rl2xx" Jan 25 00:21:27 crc kubenswrapper[4985]: I0125 00:21:27.578158 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"interconnect-operator-dockercfg-x2vmw" Jan 25 00:21:27 crc kubenswrapper[4985]: I0125 00:21:27.591903 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-rl2xx"] Jan 25 00:21:27 crc kubenswrapper[4985]: I0125 00:21:27.605245 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-88dzl\" (UniqueName: \"kubernetes.io/projected/438e0bde-637e-447f-a44d-9309326764de-kube-api-access-88dzl\") pod \"interconnect-operator-5bb49f789d-rl2xx\" (UID: \"438e0bde-637e-447f-a44d-9309326764de\") " pod="service-telemetry/interconnect-operator-5bb49f789d-rl2xx" Jan 25 00:21:27 crc kubenswrapper[4985]: I0125 00:21:27.706517 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-88dzl\" (UniqueName: \"kubernetes.io/projected/438e0bde-637e-447f-a44d-9309326764de-kube-api-access-88dzl\") pod \"interconnect-operator-5bb49f789d-rl2xx\" (UID: \"438e0bde-637e-447f-a44d-9309326764de\") " pod="service-telemetry/interconnect-operator-5bb49f789d-rl2xx" Jan 25 00:21:27 crc kubenswrapper[4985]: I0125 00:21:27.731735 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-88dzl\" (UniqueName: \"kubernetes.io/projected/438e0bde-637e-447f-a44d-9309326764de-kube-api-access-88dzl\") pod \"interconnect-operator-5bb49f789d-rl2xx\" (UID: \"438e0bde-637e-447f-a44d-9309326764de\") " pod="service-telemetry/interconnect-operator-5bb49f789d-rl2xx" Jan 25 00:21:27 crc kubenswrapper[4985]: I0125 00:21:27.884644 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/interconnect-operator-5bb49f789d-rl2xx" Jan 25 00:21:28 crc kubenswrapper[4985]: I0125 00:21:28.332919 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/interconnect-operator-5bb49f789d-rl2xx"] Jan 25 00:21:28 crc kubenswrapper[4985]: W0125 00:21:28.337346 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod438e0bde_637e_447f_a44d_9309326764de.slice/crio-f4e8c65473a5c584b625f8e8e313b322b6b744757ebaa7166ea747df0ec24676 WatchSource:0}: Error finding container f4e8c65473a5c584b625f8e8e313b322b6b744757ebaa7166ea747df0ec24676: Status 404 returned error can't find the container with id f4e8c65473a5c584b625f8e8e313b322b6b744757ebaa7166ea747df0ec24676 Jan 25 00:21:28 crc kubenswrapper[4985]: I0125 00:21:28.381605 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/interconnect-operator-5bb49f789d-rl2xx" event={"ID":"438e0bde-637e-447f-a44d-9309326764de","Type":"ContainerStarted","Data":"f4e8c65473a5c584b625f8e8e313b322b6b744757ebaa7166ea747df0ec24676"} Jan 25 00:21:33 crc kubenswrapper[4985]: I0125 00:21:33.265156 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-j5xvf" Jan 25 00:21:33 crc kubenswrapper[4985]: I0125 00:21:33.315953 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-j5xvf" Jan 25 00:21:35 crc kubenswrapper[4985]: I0125 00:21:35.853714 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-j5xvf"] Jan 25 00:21:35 crc kubenswrapper[4985]: I0125 00:21:35.854308 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-j5xvf" podUID="d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91" containerName="registry-server" containerID="cri-o://d5de0ec1931b5ff9ca008b5ad0e8387b17cdcba67cc8c22adb03ea4e0f27dbc1" gracePeriod=2 Jan 25 00:21:36 crc kubenswrapper[4985]: I0125 00:21:36.465047 4985 generic.go:334] "Generic (PLEG): container finished" podID="d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91" containerID="d5de0ec1931b5ff9ca008b5ad0e8387b17cdcba67cc8c22adb03ea4e0f27dbc1" exitCode=0 Jan 25 00:21:36 crc kubenswrapper[4985]: I0125 00:21:36.465119 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j5xvf" event={"ID":"d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91","Type":"ContainerDied","Data":"d5de0ec1931b5ff9ca008b5ad0e8387b17cdcba67cc8c22adb03ea4e0f27dbc1"} Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.188085 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-j5xvf" Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.335361 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91-catalog-content\") pod \"d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91\" (UID: \"d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91\") " Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.335475 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5krz9\" (UniqueName: \"kubernetes.io/projected/d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91-kube-api-access-5krz9\") pod \"d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91\" (UID: \"d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91\") " Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.335582 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91-utilities\") pod \"d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91\" (UID: \"d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91\") " Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.336933 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91-utilities" (OuterVolumeSpecName: "utilities") pod "d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91" (UID: "d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.342307 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91-kube-api-access-5krz9" (OuterVolumeSpecName: "kube-api-access-5krz9") pod "d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91" (UID: "d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91"). InnerVolumeSpecName "kube-api-access-5krz9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.437363 4985 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91-utilities\") on node \"crc\" DevicePath \"\"" Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.437416 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5krz9\" (UniqueName: \"kubernetes.io/projected/d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91-kube-api-access-5krz9\") on node \"crc\" DevicePath \"\"" Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.462717 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91" (UID: "d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.496166 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/service-telemetry-operator-55b89ddfb9-sp9wb" event={"ID":"8db9b718-8110-4d6a-9082-26ebabcf60c6","Type":"ContainerStarted","Data":"5c9cc491204cb7ef9ccb194444e3a04028c9287854ac0146a057983e6ba3a65b"} Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.497732 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/interconnect-operator-5bb49f789d-rl2xx" event={"ID":"438e0bde-637e-447f-a44d-9309326764de","Type":"ContainerStarted","Data":"6faad628d9c8dec1658d01c81fe543d415bdf76743b06a1e450f58ae6648d0a6"} Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.499681 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-j5xvf" event={"ID":"d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91","Type":"ContainerDied","Data":"95911c680c67c06d88594eb1358c46ce36a293da83d787d8b294e89ede493891"} Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.499712 4985 scope.go:117] "RemoveContainer" containerID="d5de0ec1931b5ff9ca008b5ad0e8387b17cdcba67cc8c22adb03ea4e0f27dbc1" Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.499733 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-j5xvf" Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.522364 4985 scope.go:117] "RemoveContainer" containerID="79dca948dc9d489283eda126f9048c1e3cb8be2a566823c399319ab6a33d7549" Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.523481 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/service-telemetry-operator-55b89ddfb9-sp9wb" podStartSLOduration=1.5300137569999999 podStartE2EDuration="16.523462751s" podCreationTimestamp="2026-01-25 00:21:24 +0000 UTC" firstStartedPulling="2026-01-25 00:21:25.036835279 +0000 UTC m=+895.068771542" lastFinishedPulling="2026-01-25 00:21:40.030284263 +0000 UTC m=+910.062220536" observedRunningTime="2026-01-25 00:21:40.518381413 +0000 UTC m=+910.550317696" watchObservedRunningTime="2026-01-25 00:21:40.523462751 +0000 UTC m=+910.555399024" Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.538992 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/interconnect-operator-5bb49f789d-rl2xx" podStartSLOduration=1.924019361 podStartE2EDuration="13.538972321s" podCreationTimestamp="2026-01-25 00:21:27 +0000 UTC" firstStartedPulling="2026-01-25 00:21:28.339375242 +0000 UTC m=+898.371311515" lastFinishedPulling="2026-01-25 00:21:39.954328202 +0000 UTC m=+909.986264475" observedRunningTime="2026-01-25 00:21:40.535409375 +0000 UTC m=+910.567345648" watchObservedRunningTime="2026-01-25 00:21:40.538972321 +0000 UTC m=+910.570908604" Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.539954 4985 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.553270 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-j5xvf"] Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.555869 4985 scope.go:117] "RemoveContainer" containerID="cee80f9f73ba22eae38e6a400c61c30effa599200dca2d1af8630859666f5d56" Jan 25 00:21:40 crc kubenswrapper[4985]: 
I0125 00:21:40.558894 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-j5xvf"] Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.861680 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-ct8db"] Jan 25 00:21:40 crc kubenswrapper[4985]: E0125 00:21:40.861884 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91" containerName="extract-content" Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.861896 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91" containerName="extract-content" Jan 25 00:21:40 crc kubenswrapper[4985]: E0125 00:21:40.861919 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91" containerName="extract-utilities" Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.861926 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91" containerName="extract-utilities" Jan 25 00:21:40 crc kubenswrapper[4985]: E0125 00:21:40.861934 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91" containerName="registry-server" Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.861942 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91" containerName="registry-server" Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.862050 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91" containerName="registry-server" Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.863041 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ct8db" Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.885443 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ct8db"] Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.943262 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d086cfd3-9439-45b9-acc8-cb8a7b65e47e-catalog-content\") pod \"community-operators-ct8db\" (UID: \"d086cfd3-9439-45b9-acc8-cb8a7b65e47e\") " pod="openshift-marketplace/community-operators-ct8db" Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.943330 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z2mpf\" (UniqueName: \"kubernetes.io/projected/d086cfd3-9439-45b9-acc8-cb8a7b65e47e-kube-api-access-z2mpf\") pod \"community-operators-ct8db\" (UID: \"d086cfd3-9439-45b9-acc8-cb8a7b65e47e\") " pod="openshift-marketplace/community-operators-ct8db" Jan 25 00:21:40 crc kubenswrapper[4985]: I0125 00:21:40.943607 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d086cfd3-9439-45b9-acc8-cb8a7b65e47e-utilities\") pod \"community-operators-ct8db\" (UID: \"d086cfd3-9439-45b9-acc8-cb8a7b65e47e\") " pod="openshift-marketplace/community-operators-ct8db" Jan 25 00:21:41 crc kubenswrapper[4985]: I0125 00:21:41.044607 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d086cfd3-9439-45b9-acc8-cb8a7b65e47e-utilities\") pod \"community-operators-ct8db\" (UID: \"d086cfd3-9439-45b9-acc8-cb8a7b65e47e\") " pod="openshift-marketplace/community-operators-ct8db" Jan 25 00:21:41 crc kubenswrapper[4985]: I0125 00:21:41.044670 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d086cfd3-9439-45b9-acc8-cb8a7b65e47e-catalog-content\") pod \"community-operators-ct8db\" (UID: \"d086cfd3-9439-45b9-acc8-cb8a7b65e47e\") " pod="openshift-marketplace/community-operators-ct8db" Jan 25 00:21:41 crc kubenswrapper[4985]: I0125 00:21:41.044732 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z2mpf\" (UniqueName: \"kubernetes.io/projected/d086cfd3-9439-45b9-acc8-cb8a7b65e47e-kube-api-access-z2mpf\") pod \"community-operators-ct8db\" (UID: \"d086cfd3-9439-45b9-acc8-cb8a7b65e47e\") " pod="openshift-marketplace/community-operators-ct8db" Jan 25 00:21:41 crc kubenswrapper[4985]: I0125 00:21:41.045232 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d086cfd3-9439-45b9-acc8-cb8a7b65e47e-catalog-content\") pod \"community-operators-ct8db\" (UID: \"d086cfd3-9439-45b9-acc8-cb8a7b65e47e\") " pod="openshift-marketplace/community-operators-ct8db" Jan 25 00:21:41 crc kubenswrapper[4985]: I0125 00:21:41.045728 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d086cfd3-9439-45b9-acc8-cb8a7b65e47e-utilities\") pod \"community-operators-ct8db\" (UID: \"d086cfd3-9439-45b9-acc8-cb8a7b65e47e\") " pod="openshift-marketplace/community-operators-ct8db" Jan 25 00:21:41 crc kubenswrapper[4985]: I0125 00:21:41.068468 4985 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-z2mpf\" (UniqueName: \"kubernetes.io/projected/d086cfd3-9439-45b9-acc8-cb8a7b65e47e-kube-api-access-z2mpf\") pod \"community-operators-ct8db\" (UID: \"d086cfd3-9439-45b9-acc8-cb8a7b65e47e\") " pod="openshift-marketplace/community-operators-ct8db" Jan 25 00:21:41 crc kubenswrapper[4985]: I0125 00:21:41.175944 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ct8db" Jan 25 00:21:41 crc kubenswrapper[4985]: I0125 00:21:41.441146 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-ct8db"] Jan 25 00:21:41 crc kubenswrapper[4985]: I0125 00:21:41.517474 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ct8db" event={"ID":"d086cfd3-9439-45b9-acc8-cb8a7b65e47e","Type":"ContainerStarted","Data":"6e31f7e1595fd71d9f9da230a8341dd56a7fd276a0741a3161346a6f9f4da782"} Jan 25 00:21:42 crc kubenswrapper[4985]: I0125 00:21:42.285768 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91" path="/var/lib/kubelet/pods/d8996bbf-8a4a-4cc9-bd4a-3f6a0c70fe91/volumes" Jan 25 00:21:43 crc kubenswrapper[4985]: I0125 00:21:43.535911 4985 generic.go:334] "Generic (PLEG): container finished" podID="d086cfd3-9439-45b9-acc8-cb8a7b65e47e" containerID="7db1d768a7371afe08c7a55809f49e6df5f7e89cd1e126c67a675de1bf8ad035" exitCode=0 Jan 25 00:21:43 crc kubenswrapper[4985]: I0125 00:21:43.535961 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ct8db" event={"ID":"d086cfd3-9439-45b9-acc8-cb8a7b65e47e","Type":"ContainerDied","Data":"7db1d768a7371afe08c7a55809f49e6df5f7e89cd1e126c67a675de1bf8ad035"} Jan 25 00:21:44 crc kubenswrapper[4985]: I0125 00:21:44.545006 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ct8db" event={"ID":"d086cfd3-9439-45b9-acc8-cb8a7b65e47e","Type":"ContainerStarted","Data":"eacfbec9e2a2e73bf26e55a20f0962e59c3a0b81d2965bdf9b780604fc200f48"} Jan 25 00:21:45 crc kubenswrapper[4985]: I0125 00:21:45.554262 4985 generic.go:334] "Generic (PLEG): container finished" podID="d086cfd3-9439-45b9-acc8-cb8a7b65e47e" containerID="eacfbec9e2a2e73bf26e55a20f0962e59c3a0b81d2965bdf9b780604fc200f48" exitCode=0 Jan 25 00:21:45 crc kubenswrapper[4985]: I0125 00:21:45.554374 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ct8db" event={"ID":"d086cfd3-9439-45b9-acc8-cb8a7b65e47e","Type":"ContainerDied","Data":"eacfbec9e2a2e73bf26e55a20f0962e59c3a0b81d2965bdf9b780604fc200f48"} Jan 25 00:21:46 crc kubenswrapper[4985]: I0125 00:21:46.565049 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ct8db" event={"ID":"d086cfd3-9439-45b9-acc8-cb8a7b65e47e","Type":"ContainerStarted","Data":"74036bcd64acd7b3dbcf29d9b565fcedd7149d4d23a83982e1087ae979f08fc4"} Jan 25 00:21:46 crc kubenswrapper[4985]: I0125 00:21:46.582674 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-ct8db" podStartSLOduration=4.194775934 podStartE2EDuration="6.58265327s" podCreationTimestamp="2026-01-25 00:21:40 +0000 UTC" firstStartedPulling="2026-01-25 00:21:43.537535475 +0000 UTC m=+913.569471738" lastFinishedPulling="2026-01-25 00:21:45.925412761 +0000 UTC m=+915.957349074" observedRunningTime="2026-01-25 
00:21:46.582362003 +0000 UTC m=+916.614298286" watchObservedRunningTime="2026-01-25 00:21:46.58265327 +0000 UTC m=+916.614589543" Jan 25 00:21:51 crc kubenswrapper[4985]: I0125 00:21:51.177048 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-ct8db" Jan 25 00:21:51 crc kubenswrapper[4985]: I0125 00:21:51.177420 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-ct8db" Jan 25 00:21:51 crc kubenswrapper[4985]: I0125 00:21:51.233565 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-ct8db" Jan 25 00:21:51 crc kubenswrapper[4985]: I0125 00:21:51.675239 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-ct8db" Jan 25 00:21:52 crc kubenswrapper[4985]: I0125 00:21:52.852435 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ct8db"] Jan 25 00:21:53 crc kubenswrapper[4985]: I0125 00:21:53.626256 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-ct8db" podUID="d086cfd3-9439-45b9-acc8-cb8a7b65e47e" containerName="registry-server" containerID="cri-o://74036bcd64acd7b3dbcf29d9b565fcedd7149d4d23a83982e1087ae979f08fc4" gracePeriod=2 Jan 25 00:21:54 crc kubenswrapper[4985]: I0125 00:21:54.012442 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-ct8db" Jan 25 00:21:54 crc kubenswrapper[4985]: I0125 00:21:54.116161 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z2mpf\" (UniqueName: \"kubernetes.io/projected/d086cfd3-9439-45b9-acc8-cb8a7b65e47e-kube-api-access-z2mpf\") pod \"d086cfd3-9439-45b9-acc8-cb8a7b65e47e\" (UID: \"d086cfd3-9439-45b9-acc8-cb8a7b65e47e\") " Jan 25 00:21:54 crc kubenswrapper[4985]: I0125 00:21:54.116278 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d086cfd3-9439-45b9-acc8-cb8a7b65e47e-catalog-content\") pod \"d086cfd3-9439-45b9-acc8-cb8a7b65e47e\" (UID: \"d086cfd3-9439-45b9-acc8-cb8a7b65e47e\") " Jan 25 00:21:54 crc kubenswrapper[4985]: I0125 00:21:54.116393 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d086cfd3-9439-45b9-acc8-cb8a7b65e47e-utilities\") pod \"d086cfd3-9439-45b9-acc8-cb8a7b65e47e\" (UID: \"d086cfd3-9439-45b9-acc8-cb8a7b65e47e\") " Jan 25 00:21:54 crc kubenswrapper[4985]: I0125 00:21:54.117811 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d086cfd3-9439-45b9-acc8-cb8a7b65e47e-utilities" (OuterVolumeSpecName: "utilities") pod "d086cfd3-9439-45b9-acc8-cb8a7b65e47e" (UID: "d086cfd3-9439-45b9-acc8-cb8a7b65e47e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:21:54 crc kubenswrapper[4985]: I0125 00:21:54.124531 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d086cfd3-9439-45b9-acc8-cb8a7b65e47e-kube-api-access-z2mpf" (OuterVolumeSpecName: "kube-api-access-z2mpf") pod "d086cfd3-9439-45b9-acc8-cb8a7b65e47e" (UID: "d086cfd3-9439-45b9-acc8-cb8a7b65e47e"). InnerVolumeSpecName "kube-api-access-z2mpf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:21:54 crc kubenswrapper[4985]: I0125 00:21:54.189916 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d086cfd3-9439-45b9-acc8-cb8a7b65e47e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d086cfd3-9439-45b9-acc8-cb8a7b65e47e" (UID: "d086cfd3-9439-45b9-acc8-cb8a7b65e47e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:21:54 crc kubenswrapper[4985]: I0125 00:21:54.218423 4985 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d086cfd3-9439-45b9-acc8-cb8a7b65e47e-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 25 00:21:54 crc kubenswrapper[4985]: I0125 00:21:54.218466 4985 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d086cfd3-9439-45b9-acc8-cb8a7b65e47e-utilities\") on node \"crc\" DevicePath \"\"" Jan 25 00:21:54 crc kubenswrapper[4985]: I0125 00:21:54.218479 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z2mpf\" (UniqueName: \"kubernetes.io/projected/d086cfd3-9439-45b9-acc8-cb8a7b65e47e-kube-api-access-z2mpf\") on node \"crc\" DevicePath \"\"" Jan 25 00:21:54 crc kubenswrapper[4985]: I0125 00:21:54.638722 4985 generic.go:334] "Generic (PLEG): container finished" podID="d086cfd3-9439-45b9-acc8-cb8a7b65e47e" containerID="74036bcd64acd7b3dbcf29d9b565fcedd7149d4d23a83982e1087ae979f08fc4" exitCode=0 Jan 25 00:21:54 crc kubenswrapper[4985]: I0125 00:21:54.638785 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ct8db" event={"ID":"d086cfd3-9439-45b9-acc8-cb8a7b65e47e","Type":"ContainerDied","Data":"74036bcd64acd7b3dbcf29d9b565fcedd7149d4d23a83982e1087ae979f08fc4"} Jan 25 00:21:54 crc kubenswrapper[4985]: I0125 00:21:54.638761 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-ct8db" Jan 25 00:21:54 crc kubenswrapper[4985]: I0125 00:21:54.638832 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-ct8db" event={"ID":"d086cfd3-9439-45b9-acc8-cb8a7b65e47e","Type":"ContainerDied","Data":"6e31f7e1595fd71d9f9da230a8341dd56a7fd276a0741a3161346a6f9f4da782"} Jan 25 00:21:54 crc kubenswrapper[4985]: I0125 00:21:54.638849 4985 scope.go:117] "RemoveContainer" containerID="74036bcd64acd7b3dbcf29d9b565fcedd7149d4d23a83982e1087ae979f08fc4" Jan 25 00:21:54 crc kubenswrapper[4985]: I0125 00:21:54.670587 4985 scope.go:117] "RemoveContainer" containerID="eacfbec9e2a2e73bf26e55a20f0962e59c3a0b81d2965bdf9b780604fc200f48" Jan 25 00:21:54 crc kubenswrapper[4985]: I0125 00:21:54.672389 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-ct8db"] Jan 25 00:21:54 crc kubenswrapper[4985]: I0125 00:21:54.687137 4985 scope.go:117] "RemoveContainer" containerID="7db1d768a7371afe08c7a55809f49e6df5f7e89cd1e126c67a675de1bf8ad035" Jan 25 00:21:54 crc kubenswrapper[4985]: I0125 00:21:54.690898 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-ct8db"] Jan 25 00:21:54 crc kubenswrapper[4985]: I0125 00:21:54.707208 4985 scope.go:117] "RemoveContainer" containerID="74036bcd64acd7b3dbcf29d9b565fcedd7149d4d23a83982e1087ae979f08fc4" Jan 25 00:21:54 crc kubenswrapper[4985]: E0125 00:21:54.707696 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"74036bcd64acd7b3dbcf29d9b565fcedd7149d4d23a83982e1087ae979f08fc4\": container with ID starting with 74036bcd64acd7b3dbcf29d9b565fcedd7149d4d23a83982e1087ae979f08fc4 not found: ID does not exist" containerID="74036bcd64acd7b3dbcf29d9b565fcedd7149d4d23a83982e1087ae979f08fc4" Jan 25 00:21:54 crc kubenswrapper[4985]: I0125 00:21:54.707738 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"74036bcd64acd7b3dbcf29d9b565fcedd7149d4d23a83982e1087ae979f08fc4"} err="failed to get container status \"74036bcd64acd7b3dbcf29d9b565fcedd7149d4d23a83982e1087ae979f08fc4\": rpc error: code = NotFound desc = could not find container \"74036bcd64acd7b3dbcf29d9b565fcedd7149d4d23a83982e1087ae979f08fc4\": container with ID starting with 74036bcd64acd7b3dbcf29d9b565fcedd7149d4d23a83982e1087ae979f08fc4 not found: ID does not exist" Jan 25 00:21:54 crc kubenswrapper[4985]: I0125 00:21:54.707773 4985 scope.go:117] "RemoveContainer" containerID="eacfbec9e2a2e73bf26e55a20f0962e59c3a0b81d2965bdf9b780604fc200f48" Jan 25 00:21:54 crc kubenswrapper[4985]: E0125 00:21:54.708291 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eacfbec9e2a2e73bf26e55a20f0962e59c3a0b81d2965bdf9b780604fc200f48\": container with ID starting with eacfbec9e2a2e73bf26e55a20f0962e59c3a0b81d2965bdf9b780604fc200f48 not found: ID does not exist" containerID="eacfbec9e2a2e73bf26e55a20f0962e59c3a0b81d2965bdf9b780604fc200f48" Jan 25 00:21:54 crc kubenswrapper[4985]: I0125 00:21:54.708319 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eacfbec9e2a2e73bf26e55a20f0962e59c3a0b81d2965bdf9b780604fc200f48"} err="failed to get container status \"eacfbec9e2a2e73bf26e55a20f0962e59c3a0b81d2965bdf9b780604fc200f48\": rpc error: code = NotFound desc = could not find 
container \"eacfbec9e2a2e73bf26e55a20f0962e59c3a0b81d2965bdf9b780604fc200f48\": container with ID starting with eacfbec9e2a2e73bf26e55a20f0962e59c3a0b81d2965bdf9b780604fc200f48 not found: ID does not exist" Jan 25 00:21:54 crc kubenswrapper[4985]: I0125 00:21:54.708341 4985 scope.go:117] "RemoveContainer" containerID="7db1d768a7371afe08c7a55809f49e6df5f7e89cd1e126c67a675de1bf8ad035" Jan 25 00:21:54 crc kubenswrapper[4985]: E0125 00:21:54.708764 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7db1d768a7371afe08c7a55809f49e6df5f7e89cd1e126c67a675de1bf8ad035\": container with ID starting with 7db1d768a7371afe08c7a55809f49e6df5f7e89cd1e126c67a675de1bf8ad035 not found: ID does not exist" containerID="7db1d768a7371afe08c7a55809f49e6df5f7e89cd1e126c67a675de1bf8ad035" Jan 25 00:21:54 crc kubenswrapper[4985]: I0125 00:21:54.708815 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7db1d768a7371afe08c7a55809f49e6df5f7e89cd1e126c67a675de1bf8ad035"} err="failed to get container status \"7db1d768a7371afe08c7a55809f49e6df5f7e89cd1e126c67a675de1bf8ad035\": rpc error: code = NotFound desc = could not find container \"7db1d768a7371afe08c7a55809f49e6df5f7e89cd1e126c67a675de1bf8ad035\": container with ID starting with 7db1d768a7371afe08c7a55809f49e6df5f7e89cd1e126c67a675de1bf8ad035 not found: ID does not exist" Jan 25 00:21:56 crc kubenswrapper[4985]: I0125 00:21:56.283506 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d086cfd3-9439-45b9-acc8-cb8a7b65e47e" path="/var/lib/kubelet/pods/d086cfd3-9439-45b9-acc8-cb8a7b65e47e/volumes" Jan 25 00:22:02 crc kubenswrapper[4985]: I0125 00:22:02.972059 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-mtmlh"] Jan 25 00:22:02 crc kubenswrapper[4985]: E0125 00:22:02.972992 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d086cfd3-9439-45b9-acc8-cb8a7b65e47e" containerName="extract-utilities" Jan 25 00:22:02 crc kubenswrapper[4985]: I0125 00:22:02.973013 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="d086cfd3-9439-45b9-acc8-cb8a7b65e47e" containerName="extract-utilities" Jan 25 00:22:02 crc kubenswrapper[4985]: E0125 00:22:02.973043 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d086cfd3-9439-45b9-acc8-cb8a7b65e47e" containerName="extract-content" Jan 25 00:22:02 crc kubenswrapper[4985]: I0125 00:22:02.973054 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="d086cfd3-9439-45b9-acc8-cb8a7b65e47e" containerName="extract-content" Jan 25 00:22:02 crc kubenswrapper[4985]: E0125 00:22:02.973072 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d086cfd3-9439-45b9-acc8-cb8a7b65e47e" containerName="registry-server" Jan 25 00:22:02 crc kubenswrapper[4985]: I0125 00:22:02.973082 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="d086cfd3-9439-45b9-acc8-cb8a7b65e47e" containerName="registry-server" Jan 25 00:22:02 crc kubenswrapper[4985]: I0125 00:22:02.973261 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="d086cfd3-9439-45b9-acc8-cb8a7b65e47e" containerName="registry-server" Jan 25 00:22:02 crc kubenswrapper[4985]: I0125 00:22:02.973863 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" Jan 25 00:22:02 crc kubenswrapper[4985]: I0125 00:22:02.976895 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-dockercfg-5hqpc" Jan 25 00:22:02 crc kubenswrapper[4985]: I0125 00:22:02.977323 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-inter-router-ca" Jan 25 00:22:02 crc kubenswrapper[4985]: I0125 00:22:02.977667 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-openstack-ca" Jan 25 00:22:02 crc kubenswrapper[4985]: I0125 00:22:02.978354 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-inter-router-credentials" Jan 25 00:22:02 crc kubenswrapper[4985]: I0125 00:22:02.979295 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-users" Jan 25 00:22:02 crc kubenswrapper[4985]: I0125 00:22:02.979669 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-interconnect-sasl-config" Jan 25 00:22:02 crc kubenswrapper[4985]: I0125 00:22:02.979721 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-openstack-credentials" Jan 25 00:22:03 crc kubenswrapper[4985]: I0125 00:22:03.005751 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-mtmlh"] Jan 25 00:22:03 crc kubenswrapper[4985]: I0125 00:22:03.140443 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-mtmlh\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" Jan 25 00:22:03 crc kubenswrapper[4985]: I0125 00:22:03.140496 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-mtmlh\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" Jan 25 00:22:03 crc kubenswrapper[4985]: I0125 00:22:03.140522 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bx9c7\" (UniqueName: \"kubernetes.io/projected/63289c99-25e5-4cb9-a2d0-4faf94b64988-kube-api-access-bx9c7\") pod \"default-interconnect-68864d46cb-mtmlh\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" Jan 25 00:22:03 crc kubenswrapper[4985]: I0125 00:22:03.140543 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-mtmlh\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" Jan 25 00:22:03 crc kubenswrapper[4985]: I0125 00:22:03.140576 
4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-sasl-users\") pod \"default-interconnect-68864d46cb-mtmlh\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" Jan 25 00:22:03 crc kubenswrapper[4985]: I0125 00:22:03.140655 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-mtmlh\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" Jan 25 00:22:03 crc kubenswrapper[4985]: I0125 00:22:03.140704 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/63289c99-25e5-4cb9-a2d0-4faf94b64988-sasl-config\") pod \"default-interconnect-68864d46cb-mtmlh\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" Jan 25 00:22:03 crc kubenswrapper[4985]: I0125 00:22:03.242629 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bx9c7\" (UniqueName: \"kubernetes.io/projected/63289c99-25e5-4cb9-a2d0-4faf94b64988-kube-api-access-bx9c7\") pod \"default-interconnect-68864d46cb-mtmlh\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" Jan 25 00:22:03 crc kubenswrapper[4985]: I0125 00:22:03.242719 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-mtmlh\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" Jan 25 00:22:03 crc kubenswrapper[4985]: I0125 00:22:03.242784 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-sasl-users\") pod \"default-interconnect-68864d46cb-mtmlh\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" Jan 25 00:22:03 crc kubenswrapper[4985]: I0125 00:22:03.242832 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-mtmlh\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" Jan 25 00:22:03 crc kubenswrapper[4985]: I0125 00:22:03.242890 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/63289c99-25e5-4cb9-a2d0-4faf94b64988-sasl-config\") pod \"default-interconnect-68864d46cb-mtmlh\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" Jan 25 00:22:03 crc kubenswrapper[4985]: I0125 00:22:03.242961 4985 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-mtmlh\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" Jan 25 00:22:03 crc kubenswrapper[4985]: I0125 00:22:03.243019 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-mtmlh\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" Jan 25 00:22:03 crc kubenswrapper[4985]: I0125 00:22:03.243945 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/63289c99-25e5-4cb9-a2d0-4faf94b64988-sasl-config\") pod \"default-interconnect-68864d46cb-mtmlh\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" Jan 25 00:22:03 crc kubenswrapper[4985]: I0125 00:22:03.252526 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-mtmlh\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" Jan 25 00:22:03 crc kubenswrapper[4985]: I0125 00:22:03.253561 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-sasl-users\") pod \"default-interconnect-68864d46cb-mtmlh\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" Jan 25 00:22:03 crc kubenswrapper[4985]: I0125 00:22:03.257176 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-mtmlh\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" Jan 25 00:22:03 crc kubenswrapper[4985]: I0125 00:22:03.258295 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-mtmlh\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" Jan 25 00:22:03 crc kubenswrapper[4985]: I0125 00:22:03.271609 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-mtmlh\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" Jan 25 00:22:03 crc kubenswrapper[4985]: I0125 00:22:03.278290 4985 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-bx9c7\" (UniqueName: \"kubernetes.io/projected/63289c99-25e5-4cb9-a2d0-4faf94b64988-kube-api-access-bx9c7\") pod \"default-interconnect-68864d46cb-mtmlh\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" Jan 25 00:22:03 crc kubenswrapper[4985]: I0125 00:22:03.291964 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" Jan 25 00:22:03 crc kubenswrapper[4985]: W0125 00:22:03.545394 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod63289c99_25e5_4cb9_a2d0_4faf94b64988.slice/crio-3b2dbd375684520c07539667350f58f25afec3c2f25e9bb4c0f6de6fec304afd WatchSource:0}: Error finding container 3b2dbd375684520c07539667350f58f25afec3c2f25e9bb4c0f6de6fec304afd: Status 404 returned error can't find the container with id 3b2dbd375684520c07539667350f58f25afec3c2f25e9bb4c0f6de6fec304afd Jan 25 00:22:03 crc kubenswrapper[4985]: I0125 00:22:03.557524 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-mtmlh"] Jan 25 00:22:03 crc kubenswrapper[4985]: I0125 00:22:03.701914 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" event={"ID":"63289c99-25e5-4cb9-a2d0-4faf94b64988","Type":"ContainerStarted","Data":"3b2dbd375684520c07539667350f58f25afec3c2f25e9bb4c0f6de6fec304afd"} Jan 25 00:22:11 crc kubenswrapper[4985]: I0125 00:22:11.755638 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" event={"ID":"63289c99-25e5-4cb9-a2d0-4faf94b64988","Type":"ContainerStarted","Data":"ab75526a2c85ca8716a94658f1ebca425fb484f567b0fa35d6e10d048711a537"} Jan 25 00:22:11 crc kubenswrapper[4985]: I0125 00:22:11.788152 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" podStartSLOduration=2.011208257 podStartE2EDuration="9.788092183s" podCreationTimestamp="2026-01-25 00:22:02 +0000 UTC" firstStartedPulling="2026-01-25 00:22:03.547787537 +0000 UTC m=+933.579723800" lastFinishedPulling="2026-01-25 00:22:11.324671412 +0000 UTC m=+941.356607726" observedRunningTime="2026-01-25 00:22:11.778865543 +0000 UTC m=+941.810801846" watchObservedRunningTime="2026-01-25 00:22:11.788092183 +0000 UTC m=+941.820028496" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.340719 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/prometheus-default-0"] Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.344519 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.346970 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default-tls-assets-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.348062 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-stf-dockercfg-2wps4" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.351957 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-default-rulefiles-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.352146 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-session-secret" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.352290 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default-web-config" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.352390 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"prometheus-default" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.352535 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-default-rulefiles-1" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.352575 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"prometheus-default-rulefiles-2" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.352632 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-prometheus-proxy-tls" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.352959 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"serving-certs-ca-bundle" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.365630 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-default-0"] Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.493674 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-tls-assets\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.493741 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-web-config\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.493821 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9kx8\" (UniqueName: \"kubernetes.io/projected/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-kube-api-access-m9kx8\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.493881 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-default-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-prometheus-default-rulefiles-1\") pod 
\"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.493994 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-796ec9ec-c6f5-4312-b6b1-13a1625e9b7a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-796ec9ec-c6f5-4312-b6b1-13a1625e9b7a\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.494063 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-default-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.494120 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-config\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.494150 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.494177 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"prometheus-default-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-prometheus-default-rulefiles-2\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.494245 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-config-out\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.494278 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.494548 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 
00:22:13.596056 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-default-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-prometheus-default-rulefiles-1\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.596164 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-796ec9ec-c6f5-4312-b6b1-13a1625e9b7a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-796ec9ec-c6f5-4312-b6b1-13a1625e9b7a\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.596209 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-default-rulefiles-0\" (UniqueName: \"kubernetes.io/configmap/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.596256 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-config\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.596330 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.596950 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-default-rulefiles-1\" (UniqueName: \"kubernetes.io/configmap/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-prometheus-default-rulefiles-1\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.597178 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"prometheus-default-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-prometheus-default-rulefiles-2\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.597255 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-config-out\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.597283 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " 
pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.597324 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.597355 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-tls-assets\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.597381 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-web-config\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.597405 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9kx8\" (UniqueName: \"kubernetes.io/projected/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-kube-api-access-m9kx8\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: E0125 00:22:13.597475 4985 secret.go:188] Couldn't get secret service-telemetry/default-prometheus-proxy-tls: secret "default-prometheus-proxy-tls" not found Jan 25 00:22:13 crc kubenswrapper[4985]: E0125 00:22:13.602290 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-secret-default-prometheus-proxy-tls podName:5c8a5a59-9f83-491e-8fd0-93b4cc4941cc nodeName:}" failed. No retries permitted until 2026-01-25 00:22:14.102232766 +0000 UTC m=+944.134169079 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "secret-default-prometheus-proxy-tls" (UniqueName: "kubernetes.io/secret/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-secret-default-prometheus-proxy-tls") pod "prometheus-default-0" (UID: "5c8a5a59-9f83-491e-8fd0-93b4cc4941cc") : secret "default-prometheus-proxy-tls" not found Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.604445 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-config-out\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.604710 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-web-config\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.605171 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-config\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.608843 4985 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.608912 4985 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-796ec9ec-c6f5-4312-b6b1-13a1625e9b7a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-796ec9ec-c6f5-4312-b6b1-13a1625e9b7a\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/d756b9b0afce55956e493665c2c08a668c1f608074d3674fc3c9bf420244bd3f/globalmount\"" pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.611461 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-tls-assets\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.612035 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-secret-default-session-secret\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.627067 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9kx8\" (UniqueName: \"kubernetes.io/projected/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-kube-api-access-m9kx8\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.630933 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-default-rulefiles-0\" (UniqueName: 
\"kubernetes.io/configmap/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-prometheus-default-rulefiles-0\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.631881 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"prometheus-default-rulefiles-2\" (UniqueName: \"kubernetes.io/configmap/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-prometheus-default-rulefiles-2\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.634281 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"configmap-serving-certs-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-configmap-serving-certs-ca-bundle\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:13 crc kubenswrapper[4985]: I0125 00:22:13.648844 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-796ec9ec-c6f5-4312-b6b1-13a1625e9b7a\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-796ec9ec-c6f5-4312-b6b1-13a1625e9b7a\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:14 crc kubenswrapper[4985]: I0125 00:22:14.105445 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:14 crc kubenswrapper[4985]: E0125 00:22:14.105703 4985 secret.go:188] Couldn't get secret service-telemetry/default-prometheus-proxy-tls: secret "default-prometheus-proxy-tls" not found Jan 25 00:22:14 crc kubenswrapper[4985]: E0125 00:22:14.105799 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-secret-default-prometheus-proxy-tls podName:5c8a5a59-9f83-491e-8fd0-93b4cc4941cc nodeName:}" failed. No retries permitted until 2026-01-25 00:22:15.105777545 +0000 UTC m=+945.137713828 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "secret-default-prometheus-proxy-tls" (UniqueName: "kubernetes.io/secret/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-secret-default-prometheus-proxy-tls") pod "prometheus-default-0" (UID: "5c8a5a59-9f83-491e-8fd0-93b4cc4941cc") : secret "default-prometheus-proxy-tls" not found Jan 25 00:22:15 crc kubenswrapper[4985]: I0125 00:22:15.121519 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:15 crc kubenswrapper[4985]: I0125 00:22:15.127624 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-prometheus-proxy-tls\" (UniqueName: \"kubernetes.io/secret/5c8a5a59-9f83-491e-8fd0-93b4cc4941cc-secret-default-prometheus-proxy-tls\") pod \"prometheus-default-0\" (UID: \"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc\") " pod="service-telemetry/prometheus-default-0" Jan 25 00:22:15 crc kubenswrapper[4985]: I0125 00:22:15.164844 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/prometheus-default-0" Jan 25 00:22:15 crc kubenswrapper[4985]: I0125 00:22:15.637400 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/prometheus-default-0"] Jan 25 00:22:15 crc kubenswrapper[4985]: I0125 00:22:15.783201 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc","Type":"ContainerStarted","Data":"af37b4886fc6963a840777a288412c95ef289f71f624aca627fd350bb95b32e6"} Jan 25 00:22:20 crc kubenswrapper[4985]: I0125 00:22:20.827093 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc","Type":"ContainerStarted","Data":"8e34316d2a33ca37f4f484444b104372118bc217f551c4b48bec9e561f4239b8"} Jan 25 00:22:23 crc kubenswrapper[4985]: I0125 00:22:23.424085 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-snmp-webhook-78bcbbdcff-kbwgq"] Jan 25 00:22:23 crc kubenswrapper[4985]: I0125 00:22:23.425370 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-kbwgq" Jan 25 00:22:23 crc kubenswrapper[4985]: I0125 00:22:23.450600 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-snmp-webhook-78bcbbdcff-kbwgq"] Jan 25 00:22:23 crc kubenswrapper[4985]: I0125 00:22:23.593999 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwqx2\" (UniqueName: \"kubernetes.io/projected/81a908e4-c470-4f1b-9a45-6a7d339c8749-kube-api-access-qwqx2\") pod \"default-snmp-webhook-78bcbbdcff-kbwgq\" (UID: \"81a908e4-c470-4f1b-9a45-6a7d339c8749\") " pod="service-telemetry/default-snmp-webhook-78bcbbdcff-kbwgq" Jan 25 00:22:23 crc kubenswrapper[4985]: I0125 00:22:23.694952 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwqx2\" (UniqueName: \"kubernetes.io/projected/81a908e4-c470-4f1b-9a45-6a7d339c8749-kube-api-access-qwqx2\") pod \"default-snmp-webhook-78bcbbdcff-kbwgq\" (UID: \"81a908e4-c470-4f1b-9a45-6a7d339c8749\") " pod="service-telemetry/default-snmp-webhook-78bcbbdcff-kbwgq" Jan 25 00:22:23 crc kubenswrapper[4985]: I0125 00:22:23.717272 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwqx2\" (UniqueName: \"kubernetes.io/projected/81a908e4-c470-4f1b-9a45-6a7d339c8749-kube-api-access-qwqx2\") pod \"default-snmp-webhook-78bcbbdcff-kbwgq\" (UID: \"81a908e4-c470-4f1b-9a45-6a7d339c8749\") " pod="service-telemetry/default-snmp-webhook-78bcbbdcff-kbwgq" Jan 25 00:22:23 crc kubenswrapper[4985]: I0125 00:22:23.746918 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-kbwgq" Jan 25 00:22:24 crc kubenswrapper[4985]: I0125 00:22:24.013538 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-snmp-webhook-78bcbbdcff-kbwgq"] Jan 25 00:22:24 crc kubenswrapper[4985]: I0125 00:22:24.873158 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-kbwgq" event={"ID":"81a908e4-c470-4f1b-9a45-6a7d339c8749","Type":"ContainerStarted","Data":"3191a295fb339f6342377b6fe8be4bee2d921cc5c8f538eb169e8f78ab62be98"} Jan 25 00:22:26 crc kubenswrapper[4985]: I0125 00:22:26.922695 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/alertmanager-default-0"] Jan 25 00:22:26 crc kubenswrapper[4985]: I0125 00:22:26.924311 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:26 crc kubenswrapper[4985]: I0125 00:22:26.925620 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-alertmanager-proxy-tls" Jan 25 00:22:26 crc kubenswrapper[4985]: I0125 00:22:26.931988 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-web-config" Jan 25 00:22:26 crc kubenswrapper[4985]: I0125 00:22:26.933332 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-generated" Jan 25 00:22:26 crc kubenswrapper[4985]: I0125 00:22:26.933666 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-stf-dockercfg-wq78h" Jan 25 00:22:26 crc kubenswrapper[4985]: I0125 00:22:26.933820 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-cluster-tls-config" Jan 25 00:22:26 crc kubenswrapper[4985]: I0125 00:22:26.939267 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-default-tls-assets-0" Jan 25 00:22:26 crc kubenswrapper[4985]: I0125 00:22:26.948461 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/alertmanager-default-0"] Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.040842 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/049acf2f-5927-42db-8380-a735fd0804b3-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.041034 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/049acf2f-5927-42db-8380-a735fd0804b3-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.041231 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/049acf2f-5927-42db-8380-a735fd0804b3-tls-assets\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.041283 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/049acf2f-5927-42db-8380-a735fd0804b3-cluster-tls-config\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.041377 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/049acf2f-5927-42db-8380-a735fd0804b3-web-config\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.041464 4985 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j85td\" (UniqueName: \"kubernetes.io/projected/049acf2f-5927-42db-8380-a735fd0804b3-kube-api-access-j85td\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.041511 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-a75b840d-626c-435c-b374-4a7b0257eeb1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a75b840d-626c-435c-b374-4a7b0257eeb1\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.041545 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/049acf2f-5927-42db-8380-a735fd0804b3-config-out\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.041566 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/049acf2f-5927-42db-8380-a735fd0804b3-config-volume\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.143243 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/049acf2f-5927-42db-8380-a735fd0804b3-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.143306 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/049acf2f-5927-42db-8380-a735fd0804b3-tls-assets\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.143323 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/049acf2f-5927-42db-8380-a735fd0804b3-cluster-tls-config\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.143353 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/049acf2f-5927-42db-8380-a735fd0804b3-web-config\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.143383 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j85td\" (UniqueName: \"kubernetes.io/projected/049acf2f-5927-42db-8380-a735fd0804b3-kube-api-access-j85td\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc 
kubenswrapper[4985]: I0125 00:22:27.143416 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-a75b840d-626c-435c-b374-4a7b0257eeb1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a75b840d-626c-435c-b374-4a7b0257eeb1\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.143434 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/049acf2f-5927-42db-8380-a735fd0804b3-config-out\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: E0125 00:22:27.143441 4985 secret.go:188] Couldn't get secret service-telemetry/default-alertmanager-proxy-tls: secret "default-alertmanager-proxy-tls" not found Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.143461 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/049acf2f-5927-42db-8380-a735fd0804b3-config-volume\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: E0125 00:22:27.143508 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/049acf2f-5927-42db-8380-a735fd0804b3-secret-default-alertmanager-proxy-tls podName:049acf2f-5927-42db-8380-a735fd0804b3 nodeName:}" failed. No retries permitted until 2026-01-25 00:22:27.643489262 +0000 UTC m=+957.675425535 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "secret-default-alertmanager-proxy-tls" (UniqueName: "kubernetes.io/secret/049acf2f-5927-42db-8380-a735fd0804b3-secret-default-alertmanager-proxy-tls") pod "alertmanager-default-0" (UID: "049acf2f-5927-42db-8380-a735fd0804b3") : secret "default-alertmanager-proxy-tls" not found Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.143525 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/049acf2f-5927-42db-8380-a735fd0804b3-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.149272 4985 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
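
The MountVolume.SetUp failures for secret-default-alertmanager-proxy-tls around this point (and the earlier ones for secret-default-prometheus-proxy-tls) appear to be a start-up ordering race rather than a fault: the pod was admitted before the referenced TLS secret existed, so kubelet keeps the mount operation pending and schedules retries with an increasing delay, visible in the nestedpendingoperations errors as durationBeforeRetry 500ms, then 1s, then 2s, until the secret appears and "MountVolume.SetUp succeeded" is logged (00:22:15.127 for prometheus-default-0, 00:22:30.703 for alertmanager-default-0). A minimal sketch of that kind of doubling backoff, written in Go purely for illustration and not taken from kubelet source (the cap value and helper names here are assumptions):

    // Illustrative sketch only: a retry delay that starts at 500ms and doubles,
    // matching the 500ms -> 1s -> 2s progression in the errors above.
    package main

    import (
        "fmt"
        "time"
    )

    // nextDelay returns the delay before the next retry, doubling the previous
    // delay up to maxDelay. The 500ms starting value mirrors the log; the cap
    // is an assumption for the example.
    func nextDelay(current, maxDelay time.Duration) time.Duration {
        if current == 0 {
            return 500 * time.Millisecond
        }
        next := current * 2
        if next > maxDelay {
            return maxDelay
        }
        return next
    }

    func main() {
        var d time.Duration
        for i := 0; i < 5; i++ {
            d = nextDelay(d, 2*time.Minute) // cap chosen for illustration
            fmt.Printf("retry %d after %s\n", i+1, d)
        }
    }

Running the sketch prints 500ms, 1s, 2s, 4s, 8s, which is consistent with the retry intervals recorded in this log; the real kubelet backoff parameters and cap are version-dependent and not shown here.
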
Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.149320 4985 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-a75b840d-626c-435c-b374-4a7b0257eeb1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a75b840d-626c-435c-b374-4a7b0257eeb1\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/2345504d01bb9b5e9d975d4c73a3d67332bcea4d2121c0d3dd02f7e518340ce6/globalmount\"" pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.149382 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-session-secret\" (UniqueName: \"kubernetes.io/secret/049acf2f-5927-42db-8380-a735fd0804b3-secret-default-session-secret\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.150789 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cluster-tls-config\" (UniqueName: \"kubernetes.io/secret/049acf2f-5927-42db-8380-a735fd0804b3-cluster-tls-config\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.151307 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/secret/049acf2f-5927-42db-8380-a735fd0804b3-config-volume\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.151942 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-assets\" (UniqueName: \"kubernetes.io/projected/049acf2f-5927-42db-8380-a735fd0804b3-tls-assets\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.154783 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-out\" (UniqueName: \"kubernetes.io/empty-dir/049acf2f-5927-42db-8380-a735fd0804b3-config-out\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.156553 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"web-config\" (UniqueName: \"kubernetes.io/secret/049acf2f-5927-42db-8380-a735fd0804b3-web-config\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.167328 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j85td\" (UniqueName: \"kubernetes.io/projected/049acf2f-5927-42db-8380-a735fd0804b3-kube-api-access-j85td\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.206408 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-a75b840d-626c-435c-b374-4a7b0257eeb1\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-a75b840d-626c-435c-b374-4a7b0257eeb1\") pod 
\"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.650870 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/049acf2f-5927-42db-8380-a735fd0804b3-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:27 crc kubenswrapper[4985]: E0125 00:22:27.651073 4985 secret.go:188] Couldn't get secret service-telemetry/default-alertmanager-proxy-tls: secret "default-alertmanager-proxy-tls" not found Jan 25 00:22:27 crc kubenswrapper[4985]: E0125 00:22:27.651205 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/049acf2f-5927-42db-8380-a735fd0804b3-secret-default-alertmanager-proxy-tls podName:049acf2f-5927-42db-8380-a735fd0804b3 nodeName:}" failed. No retries permitted until 2026-01-25 00:22:28.651177044 +0000 UTC m=+958.683113317 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "secret-default-alertmanager-proxy-tls" (UniqueName: "kubernetes.io/secret/049acf2f-5927-42db-8380-a735fd0804b3-secret-default-alertmanager-proxy-tls") pod "alertmanager-default-0" (UID: "049acf2f-5927-42db-8380-a735fd0804b3") : secret "default-alertmanager-proxy-tls" not found Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.893573 4985 generic.go:334] "Generic (PLEG): container finished" podID="5c8a5a59-9f83-491e-8fd0-93b4cc4941cc" containerID="8e34316d2a33ca37f4f484444b104372118bc217f551c4b48bec9e561f4239b8" exitCode=0 Jan 25 00:22:27 crc kubenswrapper[4985]: I0125 00:22:27.893619 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc","Type":"ContainerDied","Data":"8e34316d2a33ca37f4f484444b104372118bc217f551c4b48bec9e561f4239b8"} Jan 25 00:22:28 crc kubenswrapper[4985]: I0125 00:22:28.665999 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/049acf2f-5927-42db-8380-a735fd0804b3-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:28 crc kubenswrapper[4985]: E0125 00:22:28.666212 4985 secret.go:188] Couldn't get secret service-telemetry/default-alertmanager-proxy-tls: secret "default-alertmanager-proxy-tls" not found Jan 25 00:22:28 crc kubenswrapper[4985]: E0125 00:22:28.666597 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/049acf2f-5927-42db-8380-a735fd0804b3-secret-default-alertmanager-proxy-tls podName:049acf2f-5927-42db-8380-a735fd0804b3 nodeName:}" failed. No retries permitted until 2026-01-25 00:22:30.666571529 +0000 UTC m=+960.698507822 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "secret-default-alertmanager-proxy-tls" (UniqueName: "kubernetes.io/secret/049acf2f-5927-42db-8380-a735fd0804b3-secret-default-alertmanager-proxy-tls") pod "alertmanager-default-0" (UID: "049acf2f-5927-42db-8380-a735fd0804b3") : secret "default-alertmanager-proxy-tls" not found Jan 25 00:22:30 crc kubenswrapper[4985]: I0125 00:22:30.694492 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/049acf2f-5927-42db-8380-a735fd0804b3-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:30 crc kubenswrapper[4985]: I0125 00:22:30.703637 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-default-alertmanager-proxy-tls\" (UniqueName: \"kubernetes.io/secret/049acf2f-5927-42db-8380-a735fd0804b3-secret-default-alertmanager-proxy-tls\") pod \"alertmanager-default-0\" (UID: \"049acf2f-5927-42db-8380-a735fd0804b3\") " pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:30 crc kubenswrapper[4985]: I0125 00:22:30.854088 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"alertmanager-stf-dockercfg-wq78h" Jan 25 00:22:30 crc kubenswrapper[4985]: I0125 00:22:30.863189 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/alertmanager-default-0" Jan 25 00:22:35 crc kubenswrapper[4985]: I0125 00:22:35.295725 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/alertmanager-default-0"] Jan 25 00:22:35 crc kubenswrapper[4985]: I0125 00:22:35.982425 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"049acf2f-5927-42db-8380-a735fd0804b3","Type":"ContainerStarted","Data":"d07cc2741f02f3769be12f088c44df474321ad5f76689f7d005c2c7a4d242638"} Jan 25 00:22:40 crc kubenswrapper[4985]: I0125 00:22:40.106004 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-kbwgq" event={"ID":"81a908e4-c470-4f1b-9a45-6a7d339c8749","Type":"ContainerStarted","Data":"ec4fe621e7c95ba3c2c4a4ccb5a2cae5e9c7a29ff69cc90a9aca66fa0d582f82"} Jan 25 00:22:40 crc kubenswrapper[4985]: I0125 00:22:40.125713 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-snmp-webhook-78bcbbdcff-kbwgq" podStartSLOduration=5.573552716 podStartE2EDuration="17.125692613s" podCreationTimestamp="2026-01-25 00:22:23 +0000 UTC" firstStartedPulling="2026-01-25 00:22:24.016849145 +0000 UTC m=+954.048785418" lastFinishedPulling="2026-01-25 00:22:35.568989042 +0000 UTC m=+965.600925315" observedRunningTime="2026-01-25 00:22:40.122969329 +0000 UTC m=+970.154905622" watchObservedRunningTime="2026-01-25 00:22:40.125692613 +0000 UTC m=+970.157628896" Jan 25 00:22:41 crc kubenswrapper[4985]: I0125 00:22:41.113039 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc","Type":"ContainerStarted","Data":"f59b48929d061acc3d39d217f2f907e0445b1b06e1a6b45cb0758cf2f83762b0"} Jan 25 00:22:41 crc kubenswrapper[4985]: I0125 00:22:41.431983 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46"] Jan 25 00:22:41 crc kubenswrapper[4985]: I0125 
00:22:41.433238 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" Jan 25 00:22:41 crc kubenswrapper[4985]: I0125 00:22:41.434979 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-coll-meter-sg-core-configmap" Jan 25 00:22:41 crc kubenswrapper[4985]: I0125 00:22:41.435790 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-coll-meter-proxy-tls" Jan 25 00:22:41 crc kubenswrapper[4985]: I0125 00:22:41.435830 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-session-secret" Jan 25 00:22:41 crc kubenswrapper[4985]: I0125 00:22:41.443235 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46"] Jan 25 00:22:41 crc kubenswrapper[4985]: I0125 00:22:41.443717 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"smart-gateway-dockercfg-lhd42" Jan 25 00:22:41 crc kubenswrapper[4985]: I0125 00:22:41.509429 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/bfe2073c-f3bf-42e3-8d47-394d4fc025d3-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46\" (UID: \"bfe2073c-f3bf-42e3-8d47-394d4fc025d3\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" Jan 25 00:22:41 crc kubenswrapper[4985]: I0125 00:22:41.509490 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/bfe2073c-f3bf-42e3-8d47-394d4fc025d3-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46\" (UID: \"bfe2073c-f3bf-42e3-8d47-394d4fc025d3\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" Jan 25 00:22:41 crc kubenswrapper[4985]: I0125 00:22:41.509523 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dxzr2\" (UniqueName: \"kubernetes.io/projected/bfe2073c-f3bf-42e3-8d47-394d4fc025d3-kube-api-access-dxzr2\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46\" (UID: \"bfe2073c-f3bf-42e3-8d47-394d4fc025d3\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" Jan 25 00:22:41 crc kubenswrapper[4985]: I0125 00:22:41.509549 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/bfe2073c-f3bf-42e3-8d47-394d4fc025d3-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46\" (UID: \"bfe2073c-f3bf-42e3-8d47-394d4fc025d3\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" Jan 25 00:22:41 crc kubenswrapper[4985]: I0125 00:22:41.509589 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/bfe2073c-f3bf-42e3-8d47-394d4fc025d3-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46\" (UID: \"bfe2073c-f3bf-42e3-8d47-394d4fc025d3\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" Jan 25 00:22:41 crc kubenswrapper[4985]: 
I0125 00:22:41.611465 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/bfe2073c-f3bf-42e3-8d47-394d4fc025d3-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46\" (UID: \"bfe2073c-f3bf-42e3-8d47-394d4fc025d3\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" Jan 25 00:22:41 crc kubenswrapper[4985]: I0125 00:22:41.611532 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/bfe2073c-f3bf-42e3-8d47-394d4fc025d3-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46\" (UID: \"bfe2073c-f3bf-42e3-8d47-394d4fc025d3\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" Jan 25 00:22:41 crc kubenswrapper[4985]: I0125 00:22:41.611580 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dxzr2\" (UniqueName: \"kubernetes.io/projected/bfe2073c-f3bf-42e3-8d47-394d4fc025d3-kube-api-access-dxzr2\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46\" (UID: \"bfe2073c-f3bf-42e3-8d47-394d4fc025d3\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" Jan 25 00:22:41 crc kubenswrapper[4985]: I0125 00:22:41.611613 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/bfe2073c-f3bf-42e3-8d47-394d4fc025d3-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46\" (UID: \"bfe2073c-f3bf-42e3-8d47-394d4fc025d3\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" Jan 25 00:22:41 crc kubenswrapper[4985]: I0125 00:22:41.611662 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/bfe2073c-f3bf-42e3-8d47-394d4fc025d3-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46\" (UID: \"bfe2073c-f3bf-42e3-8d47-394d4fc025d3\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" Jan 25 00:22:41 crc kubenswrapper[4985]: E0125 00:22:41.611798 4985 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-coll-meter-proxy-tls: secret "default-cloud1-coll-meter-proxy-tls" not found Jan 25 00:22:41 crc kubenswrapper[4985]: E0125 00:22:41.611856 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bfe2073c-f3bf-42e3-8d47-394d4fc025d3-default-cloud1-coll-meter-proxy-tls podName:bfe2073c-f3bf-42e3-8d47-394d4fc025d3 nodeName:}" failed. No retries permitted until 2026-01-25 00:22:42.111837247 +0000 UTC m=+972.143773520 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "default-cloud1-coll-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/bfe2073c-f3bf-42e3-8d47-394d4fc025d3-default-cloud1-coll-meter-proxy-tls") pod "default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" (UID: "bfe2073c-f3bf-42e3-8d47-394d4fc025d3") : secret "default-cloud1-coll-meter-proxy-tls" not found Jan 25 00:22:41 crc kubenswrapper[4985]: I0125 00:22:41.612750 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/bfe2073c-f3bf-42e3-8d47-394d4fc025d3-socket-dir\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46\" (UID: \"bfe2073c-f3bf-42e3-8d47-394d4fc025d3\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" Jan 25 00:22:41 crc kubenswrapper[4985]: I0125 00:22:41.613190 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/bfe2073c-f3bf-42e3-8d47-394d4fc025d3-sg-core-config\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46\" (UID: \"bfe2073c-f3bf-42e3-8d47-394d4fc025d3\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" Jan 25 00:22:41 crc kubenswrapper[4985]: I0125 00:22:41.661186 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/bfe2073c-f3bf-42e3-8d47-394d4fc025d3-session-secret\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46\" (UID: \"bfe2073c-f3bf-42e3-8d47-394d4fc025d3\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" Jan 25 00:22:41 crc kubenswrapper[4985]: I0125 00:22:41.662301 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dxzr2\" (UniqueName: \"kubernetes.io/projected/bfe2073c-f3bf-42e3-8d47-394d4fc025d3-kube-api-access-dxzr2\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46\" (UID: \"bfe2073c-f3bf-42e3-8d47-394d4fc025d3\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" Jan 25 00:22:42 crc kubenswrapper[4985]: E0125 00:22:42.118231 4985 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-coll-meter-proxy-tls: secret "default-cloud1-coll-meter-proxy-tls" not found Jan 25 00:22:42 crc kubenswrapper[4985]: E0125 00:22:42.118341 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/bfe2073c-f3bf-42e3-8d47-394d4fc025d3-default-cloud1-coll-meter-proxy-tls podName:bfe2073c-f3bf-42e3-8d47-394d4fc025d3 nodeName:}" failed. No retries permitted until 2026-01-25 00:22:43.118312787 +0000 UTC m=+973.150249090 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "default-cloud1-coll-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/bfe2073c-f3bf-42e3-8d47-394d4fc025d3-default-cloud1-coll-meter-proxy-tls") pod "default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" (UID: "bfe2073c-f3bf-42e3-8d47-394d4fc025d3") : secret "default-cloud1-coll-meter-proxy-tls" not found Jan 25 00:22:42 crc kubenswrapper[4985]: I0125 00:22:42.117977 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/bfe2073c-f3bf-42e3-8d47-394d4fc025d3-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46\" (UID: \"bfe2073c-f3bf-42e3-8d47-394d4fc025d3\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" Jan 25 00:22:42 crc kubenswrapper[4985]: I0125 00:22:42.122326 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"049acf2f-5927-42db-8380-a735fd0804b3","Type":"ContainerStarted","Data":"a5d7a7b4528943fe75c33d0e53291c99529373cdf7ec77c1c13c48ba3e61ffb8"} Jan 25 00:22:43 crc kubenswrapper[4985]: I0125 00:22:43.130767 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc","Type":"ContainerStarted","Data":"f8977dbe79452be5eb758d875433ea7fa77e08d66dd340db120834187b5cb48a"} Jan 25 00:22:43 crc kubenswrapper[4985]: I0125 00:22:43.141741 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/bfe2073c-f3bf-42e3-8d47-394d4fc025d3-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46\" (UID: \"bfe2073c-f3bf-42e3-8d47-394d4fc025d3\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" Jan 25 00:22:43 crc kubenswrapper[4985]: I0125 00:22:43.146745 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-coll-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/bfe2073c-f3bf-42e3-8d47-394d4fc025d3-default-cloud1-coll-meter-proxy-tls\") pod \"default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46\" (UID: \"bfe2073c-f3bf-42e3-8d47-394d4fc025d3\") " pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" Jan 25 00:22:43 crc kubenswrapper[4985]: I0125 00:22:43.262517 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" Jan 25 00:22:44 crc kubenswrapper[4985]: I0125 00:22:44.069121 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq"] Jan 25 00:22:44 crc kubenswrapper[4985]: I0125 00:22:44.071054 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" Jan 25 00:22:44 crc kubenswrapper[4985]: I0125 00:22:44.072826 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-ceil-meter-proxy-tls" Jan 25 00:22:44 crc kubenswrapper[4985]: I0125 00:22:44.073264 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-ceil-meter-sg-core-configmap" Jan 25 00:22:44 crc kubenswrapper[4985]: I0125 00:22:44.086042 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq"] Jan 25 00:22:44 crc kubenswrapper[4985]: I0125 00:22:44.108460 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/97e8c495-9a25-42da-a0ab-5a77667b4623-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq\" (UID: \"97e8c495-9a25-42da-a0ab-5a77667b4623\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" Jan 25 00:22:44 crc kubenswrapper[4985]: I0125 00:22:44.108553 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/97e8c495-9a25-42da-a0ab-5a77667b4623-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq\" (UID: \"97e8c495-9a25-42da-a0ab-5a77667b4623\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" Jan 25 00:22:44 crc kubenswrapper[4985]: I0125 00:22:44.108674 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/97e8c495-9a25-42da-a0ab-5a77667b4623-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq\" (UID: \"97e8c495-9a25-42da-a0ab-5a77667b4623\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" Jan 25 00:22:44 crc kubenswrapper[4985]: I0125 00:22:44.108753 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2msn\" (UniqueName: \"kubernetes.io/projected/97e8c495-9a25-42da-a0ab-5a77667b4623-kube-api-access-x2msn\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq\" (UID: \"97e8c495-9a25-42da-a0ab-5a77667b4623\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" Jan 25 00:22:44 crc kubenswrapper[4985]: I0125 00:22:44.108844 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/97e8c495-9a25-42da-a0ab-5a77667b4623-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq\" (UID: \"97e8c495-9a25-42da-a0ab-5a77667b4623\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" Jan 25 00:22:44 crc kubenswrapper[4985]: I0125 00:22:44.206293 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46"] Jan 25 00:22:44 crc kubenswrapper[4985]: I0125 00:22:44.209677 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2msn\" (UniqueName: \"kubernetes.io/projected/97e8c495-9a25-42da-a0ab-5a77667b4623-kube-api-access-x2msn\") pod 
\"default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq\" (UID: \"97e8c495-9a25-42da-a0ab-5a77667b4623\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" Jan 25 00:22:44 crc kubenswrapper[4985]: I0125 00:22:44.209724 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/97e8c495-9a25-42da-a0ab-5a77667b4623-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq\" (UID: \"97e8c495-9a25-42da-a0ab-5a77667b4623\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" Jan 25 00:22:44 crc kubenswrapper[4985]: I0125 00:22:44.209754 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/97e8c495-9a25-42da-a0ab-5a77667b4623-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq\" (UID: \"97e8c495-9a25-42da-a0ab-5a77667b4623\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" Jan 25 00:22:44 crc kubenswrapper[4985]: I0125 00:22:44.209790 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/97e8c495-9a25-42da-a0ab-5a77667b4623-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq\" (UID: \"97e8c495-9a25-42da-a0ab-5a77667b4623\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" Jan 25 00:22:44 crc kubenswrapper[4985]: I0125 00:22:44.209857 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/97e8c495-9a25-42da-a0ab-5a77667b4623-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq\" (UID: \"97e8c495-9a25-42da-a0ab-5a77667b4623\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" Jan 25 00:22:44 crc kubenswrapper[4985]: E0125 00:22:44.210092 4985 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-ceil-meter-proxy-tls: secret "default-cloud1-ceil-meter-proxy-tls" not found Jan 25 00:22:44 crc kubenswrapper[4985]: E0125 00:22:44.210155 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/97e8c495-9a25-42da-a0ab-5a77667b4623-default-cloud1-ceil-meter-proxy-tls podName:97e8c495-9a25-42da-a0ab-5a77667b4623 nodeName:}" failed. No retries permitted until 2026-01-25 00:22:44.710140681 +0000 UTC m=+974.742076954 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "default-cloud1-ceil-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/97e8c495-9a25-42da-a0ab-5a77667b4623-default-cloud1-ceil-meter-proxy-tls") pod "default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" (UID: "97e8c495-9a25-42da-a0ab-5a77667b4623") : secret "default-cloud1-ceil-meter-proxy-tls" not found Jan 25 00:22:44 crc kubenswrapper[4985]: I0125 00:22:44.210488 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/97e8c495-9a25-42da-a0ab-5a77667b4623-socket-dir\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq\" (UID: \"97e8c495-9a25-42da-a0ab-5a77667b4623\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" Jan 25 00:22:44 crc kubenswrapper[4985]: I0125 00:22:44.210970 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/97e8c495-9a25-42da-a0ab-5a77667b4623-sg-core-config\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq\" (UID: \"97e8c495-9a25-42da-a0ab-5a77667b4623\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" Jan 25 00:22:44 crc kubenswrapper[4985]: I0125 00:22:44.218965 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/97e8c495-9a25-42da-a0ab-5a77667b4623-session-secret\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq\" (UID: \"97e8c495-9a25-42da-a0ab-5a77667b4623\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" Jan 25 00:22:44 crc kubenswrapper[4985]: W0125 00:22:44.224426 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbfe2073c_f3bf_42e3_8d47_394d4fc025d3.slice/crio-5f134212b3758f7f262d64d485f32ecb23a37b12be97cc0c2b69f25a74dae659 WatchSource:0}: Error finding container 5f134212b3758f7f262d64d485f32ecb23a37b12be97cc0c2b69f25a74dae659: Status 404 returned error can't find the container with id 5f134212b3758f7f262d64d485f32ecb23a37b12be97cc0c2b69f25a74dae659 Jan 25 00:22:44 crc kubenswrapper[4985]: I0125 00:22:44.227078 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2msn\" (UniqueName: \"kubernetes.io/projected/97e8c495-9a25-42da-a0ab-5a77667b4623-kube-api-access-x2msn\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq\" (UID: \"97e8c495-9a25-42da-a0ab-5a77667b4623\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" Jan 25 00:22:44 crc kubenswrapper[4985]: I0125 00:22:44.718896 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/97e8c495-9a25-42da-a0ab-5a77667b4623-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq\" (UID: \"97e8c495-9a25-42da-a0ab-5a77667b4623\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" Jan 25 00:22:44 crc kubenswrapper[4985]: E0125 00:22:44.719071 4985 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-ceil-meter-proxy-tls: secret "default-cloud1-ceil-meter-proxy-tls" not found Jan 25 00:22:44 crc kubenswrapper[4985]: E0125 00:22:44.719281 4985 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/secret/97e8c495-9a25-42da-a0ab-5a77667b4623-default-cloud1-ceil-meter-proxy-tls podName:97e8c495-9a25-42da-a0ab-5a77667b4623 nodeName:}" failed. No retries permitted until 2026-01-25 00:22:45.719263602 +0000 UTC m=+975.751199875 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "default-cloud1-ceil-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/97e8c495-9a25-42da-a0ab-5a77667b4623-default-cloud1-ceil-meter-proxy-tls") pod "default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" (UID: "97e8c495-9a25-42da-a0ab-5a77667b4623") : secret "default-cloud1-ceil-meter-proxy-tls" not found Jan 25 00:22:45 crc kubenswrapper[4985]: I0125 00:22:45.144814 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" event={"ID":"bfe2073c-f3bf-42e3-8d47-394d4fc025d3","Type":"ContainerStarted","Data":"5f134212b3758f7f262d64d485f32ecb23a37b12be97cc0c2b69f25a74dae659"} Jan 25 00:22:45 crc kubenswrapper[4985]: I0125 00:22:45.729470 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/97e8c495-9a25-42da-a0ab-5a77667b4623-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq\" (UID: \"97e8c495-9a25-42da-a0ab-5a77667b4623\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" Jan 25 00:22:45 crc kubenswrapper[4985]: I0125 00:22:45.737812 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-ceil-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/97e8c495-9a25-42da-a0ab-5a77667b4623-default-cloud1-ceil-meter-proxy-tls\") pod \"default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq\" (UID: \"97e8c495-9a25-42da-a0ab-5a77667b4623\") " pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" Jan 25 00:22:45 crc kubenswrapper[4985]: I0125 00:22:45.907838 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" Jan 25 00:22:47 crc kubenswrapper[4985]: I0125 00:22:47.647280 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg"] Jan 25 00:22:47 crc kubenswrapper[4985]: I0125 00:22:47.648660 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" Jan 25 00:22:47 crc kubenswrapper[4985]: I0125 00:22:47.651434 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-sens-meter-sg-core-configmap" Jan 25 00:22:47 crc kubenswrapper[4985]: I0125 00:22:47.651515 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-cloud1-sens-meter-proxy-tls" Jan 25 00:22:47 crc kubenswrapper[4985]: I0125 00:22:47.658090 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/75635f0e-e1c4-4a69-8893-5cabc8341db3-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg\" (UID: \"75635f0e-e1c4-4a69-8893-5cabc8341db3\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" Jan 25 00:22:47 crc kubenswrapper[4985]: I0125 00:22:47.658154 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nt5l\" (UniqueName: \"kubernetes.io/projected/75635f0e-e1c4-4a69-8893-5cabc8341db3-kube-api-access-8nt5l\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg\" (UID: \"75635f0e-e1c4-4a69-8893-5cabc8341db3\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" Jan 25 00:22:47 crc kubenswrapper[4985]: I0125 00:22:47.658176 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/75635f0e-e1c4-4a69-8893-5cabc8341db3-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg\" (UID: \"75635f0e-e1c4-4a69-8893-5cabc8341db3\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" Jan 25 00:22:47 crc kubenswrapper[4985]: I0125 00:22:47.658198 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/75635f0e-e1c4-4a69-8893-5cabc8341db3-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg\" (UID: \"75635f0e-e1c4-4a69-8893-5cabc8341db3\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" Jan 25 00:22:47 crc kubenswrapper[4985]: I0125 00:22:47.658229 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/75635f0e-e1c4-4a69-8893-5cabc8341db3-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg\" (UID: \"75635f0e-e1c4-4a69-8893-5cabc8341db3\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" Jan 25 00:22:47 crc kubenswrapper[4985]: I0125 00:22:47.699984 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg"] Jan 25 00:22:47 crc kubenswrapper[4985]: I0125 00:22:47.760802 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/75635f0e-e1c4-4a69-8893-5cabc8341db3-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg\" (UID: \"75635f0e-e1c4-4a69-8893-5cabc8341db3\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" Jan 25 00:22:47 crc 
kubenswrapper[4985]: I0125 00:22:47.760876 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nt5l\" (UniqueName: \"kubernetes.io/projected/75635f0e-e1c4-4a69-8893-5cabc8341db3-kube-api-access-8nt5l\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg\" (UID: \"75635f0e-e1c4-4a69-8893-5cabc8341db3\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" Jan 25 00:22:47 crc kubenswrapper[4985]: I0125 00:22:47.760908 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/75635f0e-e1c4-4a69-8893-5cabc8341db3-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg\" (UID: \"75635f0e-e1c4-4a69-8893-5cabc8341db3\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" Jan 25 00:22:47 crc kubenswrapper[4985]: I0125 00:22:47.760950 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/75635f0e-e1c4-4a69-8893-5cabc8341db3-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg\" (UID: \"75635f0e-e1c4-4a69-8893-5cabc8341db3\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" Jan 25 00:22:47 crc kubenswrapper[4985]: I0125 00:22:47.761013 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/75635f0e-e1c4-4a69-8893-5cabc8341db3-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg\" (UID: \"75635f0e-e1c4-4a69-8893-5cabc8341db3\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" Jan 25 00:22:47 crc kubenswrapper[4985]: E0125 00:22:47.761334 4985 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-sens-meter-proxy-tls: secret "default-cloud1-sens-meter-proxy-tls" not found Jan 25 00:22:47 crc kubenswrapper[4985]: E0125 00:22:47.761391 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/75635f0e-e1c4-4a69-8893-5cabc8341db3-default-cloud1-sens-meter-proxy-tls podName:75635f0e-e1c4-4a69-8893-5cabc8341db3 nodeName:}" failed. No retries permitted until 2026-01-25 00:22:48.261375676 +0000 UTC m=+978.293311949 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "default-cloud1-sens-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/75635f0e-e1c4-4a69-8893-5cabc8341db3-default-cloud1-sens-meter-proxy-tls") pod "default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" (UID: "75635f0e-e1c4-4a69-8893-5cabc8341db3") : secret "default-cloud1-sens-meter-proxy-tls" not found Jan 25 00:22:47 crc kubenswrapper[4985]: I0125 00:22:47.762170 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/75635f0e-e1c4-4a69-8893-5cabc8341db3-sg-core-config\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg\" (UID: \"75635f0e-e1c4-4a69-8893-5cabc8341db3\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" Jan 25 00:22:47 crc kubenswrapper[4985]: I0125 00:22:47.762462 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/75635f0e-e1c4-4a69-8893-5cabc8341db3-socket-dir\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg\" (UID: \"75635f0e-e1c4-4a69-8893-5cabc8341db3\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" Jan 25 00:22:47 crc kubenswrapper[4985]: I0125 00:22:47.765952 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"session-secret\" (UniqueName: \"kubernetes.io/secret/75635f0e-e1c4-4a69-8893-5cabc8341db3-session-secret\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg\" (UID: \"75635f0e-e1c4-4a69-8893-5cabc8341db3\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" Jan 25 00:22:47 crc kubenswrapper[4985]: I0125 00:22:47.780163 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nt5l\" (UniqueName: \"kubernetes.io/projected/75635f0e-e1c4-4a69-8893-5cabc8341db3-kube-api-access-8nt5l\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg\" (UID: \"75635f0e-e1c4-4a69-8893-5cabc8341db3\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" Jan 25 00:22:48 crc kubenswrapper[4985]: I0125 00:22:48.271476 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/75635f0e-e1c4-4a69-8893-5cabc8341db3-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg\" (UID: \"75635f0e-e1c4-4a69-8893-5cabc8341db3\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" Jan 25 00:22:48 crc kubenswrapper[4985]: E0125 00:22:48.271740 4985 secret.go:188] Couldn't get secret service-telemetry/default-cloud1-sens-meter-proxy-tls: secret "default-cloud1-sens-meter-proxy-tls" not found Jan 25 00:22:48 crc kubenswrapper[4985]: E0125 00:22:48.271910 4985 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/75635f0e-e1c4-4a69-8893-5cabc8341db3-default-cloud1-sens-meter-proxy-tls podName:75635f0e-e1c4-4a69-8893-5cabc8341db3 nodeName:}" failed. No retries permitted until 2026-01-25 00:22:49.271881455 +0000 UTC m=+979.303817728 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "default-cloud1-sens-meter-proxy-tls" (UniqueName: "kubernetes.io/secret/75635f0e-e1c4-4a69-8893-5cabc8341db3-default-cloud1-sens-meter-proxy-tls") pod "default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" (UID: "75635f0e-e1c4-4a69-8893-5cabc8341db3") : secret "default-cloud1-sens-meter-proxy-tls" not found Jan 25 00:22:49 crc kubenswrapper[4985]: I0125 00:22:49.350877 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/75635f0e-e1c4-4a69-8893-5cabc8341db3-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg\" (UID: \"75635f0e-e1c4-4a69-8893-5cabc8341db3\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" Jan 25 00:22:49 crc kubenswrapper[4985]: I0125 00:22:49.364355 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-cloud1-sens-meter-proxy-tls\" (UniqueName: \"kubernetes.io/secret/75635f0e-e1c4-4a69-8893-5cabc8341db3-default-cloud1-sens-meter-proxy-tls\") pod \"default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg\" (UID: \"75635f0e-e1c4-4a69-8893-5cabc8341db3\") " pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" Jan 25 00:22:49 crc kubenswrapper[4985]: I0125 00:22:49.465644 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" Jan 25 00:22:51 crc kubenswrapper[4985]: I0125 00:22:51.501378 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg"] Jan 25 00:22:51 crc kubenswrapper[4985]: W0125 00:22:51.505209 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod75635f0e_e1c4_4a69_8893_5cabc8341db3.slice/crio-b2a85052b4afa4004cdeb9f90126dc9214a48b533476d874edd9fc65af387690 WatchSource:0}: Error finding container b2a85052b4afa4004cdeb9f90126dc9214a48b533476d874edd9fc65af387690: Status 404 returned error can't find the container with id b2a85052b4afa4004cdeb9f90126dc9214a48b533476d874edd9fc65af387690 Jan 25 00:22:51 crc kubenswrapper[4985]: I0125 00:22:51.551938 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq"] Jan 25 00:22:51 crc kubenswrapper[4985]: W0125 00:22:51.556790 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod97e8c495_9a25_42da_a0ab_5a77667b4623.slice/crio-c1928aadd29c676bdcf1570b53157be7dcf1eae383e7ef6706c95ff928cc5d7f WatchSource:0}: Error finding container c1928aadd29c676bdcf1570b53157be7dcf1eae383e7ef6706c95ff928cc5d7f: Status 404 returned error can't find the container with id c1928aadd29c676bdcf1570b53157be7dcf1eae383e7ef6706c95ff928cc5d7f Jan 25 00:22:52 crc kubenswrapper[4985]: I0125 00:22:52.195661 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" event={"ID":"75635f0e-e1c4-4a69-8893-5cabc8341db3","Type":"ContainerStarted","Data":"2c0731871172a541132c110a350287e2ba4f16e10fd88e7d02fbb743ca8a4006"} Jan 25 00:22:52 crc kubenswrapper[4985]: I0125 00:22:52.196003 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" event={"ID":"75635f0e-e1c4-4a69-8893-5cabc8341db3","Type":"ContainerStarted","Data":"b2a85052b4afa4004cdeb9f90126dc9214a48b533476d874edd9fc65af387690"} Jan 25 00:22:52 crc kubenswrapper[4985]: I0125 00:22:52.198627 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/prometheus-default-0" event={"ID":"5c8a5a59-9f83-491e-8fd0-93b4cc4941cc","Type":"ContainerStarted","Data":"d16d64a292b8c8dfa9843090e21e55b59c6291428ab505591f40163088ef4f5b"} Jan 25 00:22:52 crc kubenswrapper[4985]: I0125 00:22:52.199682 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" event={"ID":"97e8c495-9a25-42da-a0ab-5a77667b4623","Type":"ContainerStarted","Data":"c1928aadd29c676bdcf1570b53157be7dcf1eae383e7ef6706c95ff928cc5d7f"} Jan 25 00:22:52 crc kubenswrapper[4985]: I0125 00:22:52.200911 4985 generic.go:334] "Generic (PLEG): container finished" podID="049acf2f-5927-42db-8380-a735fd0804b3" containerID="a5d7a7b4528943fe75c33d0e53291c99529373cdf7ec77c1c13c48ba3e61ffb8" exitCode=0 Jan 25 00:22:52 crc kubenswrapper[4985]: I0125 00:22:52.200971 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"049acf2f-5927-42db-8380-a735fd0804b3","Type":"ContainerDied","Data":"a5d7a7b4528943fe75c33d0e53291c99529373cdf7ec77c1c13c48ba3e61ffb8"} Jan 25 00:22:52 crc kubenswrapper[4985]: I0125 00:22:52.202180 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" event={"ID":"bfe2073c-f3bf-42e3-8d47-394d4fc025d3","Type":"ContainerStarted","Data":"5c6ed0557d507da3828c8825cb78df34c113cde55707984631751665eb2db604"} Jan 25 00:22:52 crc kubenswrapper[4985]: I0125 00:22:52.229368 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/prometheus-default-0" podStartSLOduration=4.722142309 podStartE2EDuration="40.229349269s" podCreationTimestamp="2026-01-25 00:22:12 +0000 UTC" firstStartedPulling="2026-01-25 00:22:15.643805447 +0000 UTC m=+945.675741720" lastFinishedPulling="2026-01-25 00:22:51.151012407 +0000 UTC m=+981.182948680" observedRunningTime="2026-01-25 00:22:52.228955289 +0000 UTC m=+982.260891572" watchObservedRunningTime="2026-01-25 00:22:52.229349269 +0000 UTC m=+982.261285552" Jan 25 00:22:53 crc kubenswrapper[4985]: I0125 00:22:53.213044 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" event={"ID":"97e8c495-9a25-42da-a0ab-5a77667b4623","Type":"ContainerStarted","Data":"5e595285cb3ebc4632d2822a787f98a922c5b07f68f3145f858713da348d8a5c"} Jan 25 00:22:54 crc kubenswrapper[4985]: I0125 00:22:54.845185 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t"] Jan 25 00:22:54 crc kubenswrapper[4985]: I0125 00:22:54.848125 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t" Jan 25 00:22:54 crc kubenswrapper[4985]: I0125 00:22:54.850914 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-coll-event-sg-core-configmap" Jan 25 00:22:54 crc kubenswrapper[4985]: I0125 00:22:54.851067 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"elasticsearch-es-cert" Jan 25 00:22:54 crc kubenswrapper[4985]: I0125 00:22:54.852655 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t"] Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.046290 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/0f416a9f-a874-47ae-b48b-b490193667c4-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t\" (UID: \"0f416a9f-a874-47ae-b48b-b490193667c4\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.046656 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qkv9\" (UniqueName: \"kubernetes.io/projected/0f416a9f-a874-47ae-b48b-b490193667c4-kube-api-access-2qkv9\") pod \"default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t\" (UID: \"0f416a9f-a874-47ae-b48b-b490193667c4\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.046707 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/0f416a9f-a874-47ae-b48b-b490193667c4-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t\" (UID: \"0f416a9f-a874-47ae-b48b-b490193667c4\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.046752 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/0f416a9f-a874-47ae-b48b-b490193667c4-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t\" (UID: \"0f416a9f-a874-47ae-b48b-b490193667c4\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.148358 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/0f416a9f-a874-47ae-b48b-b490193667c4-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t\" (UID: \"0f416a9f-a874-47ae-b48b-b490193667c4\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.149247 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/0f416a9f-a874-47ae-b48b-b490193667c4-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t\" (UID: \"0f416a9f-a874-47ae-b48b-b490193667c4\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.149348 4985 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/0f416a9f-a874-47ae-b48b-b490193667c4-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t\" (UID: \"0f416a9f-a874-47ae-b48b-b490193667c4\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.149458 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qkv9\" (UniqueName: \"kubernetes.io/projected/0f416a9f-a874-47ae-b48b-b490193667c4-kube-api-access-2qkv9\") pod \"default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t\" (UID: \"0f416a9f-a874-47ae-b48b-b490193667c4\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.150126 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/0f416a9f-a874-47ae-b48b-b490193667c4-sg-core-config\") pod \"default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t\" (UID: \"0f416a9f-a874-47ae-b48b-b490193667c4\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.150952 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/0f416a9f-a874-47ae-b48b-b490193667c4-socket-dir\") pod \"default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t\" (UID: \"0f416a9f-a874-47ae-b48b-b490193667c4\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.153635 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/0f416a9f-a874-47ae-b48b-b490193667c4-elastic-certs\") pod \"default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t\" (UID: \"0f416a9f-a874-47ae-b48b-b490193667c4\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.179059 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="service-telemetry/prometheus-default-0" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.190725 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qkv9\" (UniqueName: \"kubernetes.io/projected/0f416a9f-a874-47ae-b48b-b490193667c4-kube-api-access-2qkv9\") pod \"default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t\" (UID: \"0f416a9f-a874-47ae-b48b-b490193667c4\") " pod="service-telemetry/default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.226836 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"049acf2f-5927-42db-8380-a735fd0804b3","Type":"ContainerStarted","Data":"1b9bb0555df91712197efa7358fc63481effcddfb1bf47a0348e38663f1c78ae"} Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.361296 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x"] Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.363771 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.366063 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"default-cloud1-ceil-event-sg-core-configmap" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.377313 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x"] Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.471045 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.556748 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/512b3aca-4b83-4d34-8401-a14ea99f20f2-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x\" (UID: \"512b3aca-4b83-4d34-8401-a14ea99f20f2\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.556795 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t7gt9\" (UniqueName: \"kubernetes.io/projected/512b3aca-4b83-4d34-8401-a14ea99f20f2-kube-api-access-t7gt9\") pod \"default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x\" (UID: \"512b3aca-4b83-4d34-8401-a14ea99f20f2\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.556830 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/512b3aca-4b83-4d34-8401-a14ea99f20f2-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x\" (UID: \"512b3aca-4b83-4d34-8401-a14ea99f20f2\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.556849 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/512b3aca-4b83-4d34-8401-a14ea99f20f2-socket-dir\") pod \"default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x\" (UID: \"512b3aca-4b83-4d34-8401-a14ea99f20f2\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.670568 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/512b3aca-4b83-4d34-8401-a14ea99f20f2-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x\" (UID: \"512b3aca-4b83-4d34-8401-a14ea99f20f2\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.670806 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t7gt9\" (UniqueName: \"kubernetes.io/projected/512b3aca-4b83-4d34-8401-a14ea99f20f2-kube-api-access-t7gt9\") pod \"default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x\" (UID: \"512b3aca-4b83-4d34-8401-a14ea99f20f2\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 
00:22:55.670851 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/512b3aca-4b83-4d34-8401-a14ea99f20f2-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x\" (UID: \"512b3aca-4b83-4d34-8401-a14ea99f20f2\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.670872 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/512b3aca-4b83-4d34-8401-a14ea99f20f2-socket-dir\") pod \"default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x\" (UID: \"512b3aca-4b83-4d34-8401-a14ea99f20f2\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.671246 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/empty-dir/512b3aca-4b83-4d34-8401-a14ea99f20f2-socket-dir\") pod \"default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x\" (UID: \"512b3aca-4b83-4d34-8401-a14ea99f20f2\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.673145 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-config\" (UniqueName: \"kubernetes.io/configmap/512b3aca-4b83-4d34-8401-a14ea99f20f2-sg-core-config\") pod \"default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x\" (UID: \"512b3aca-4b83-4d34-8401-a14ea99f20f2\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.691489 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"elastic-certs\" (UniqueName: \"kubernetes.io/secret/512b3aca-4b83-4d34-8401-a14ea99f20f2-elastic-certs\") pod \"default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x\" (UID: \"512b3aca-4b83-4d34-8401-a14ea99f20f2\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.695914 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t7gt9\" (UniqueName: \"kubernetes.io/projected/512b3aca-4b83-4d34-8401-a14ea99f20f2-kube-api-access-t7gt9\") pod \"default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x\" (UID: \"512b3aca-4b83-4d34-8401-a14ea99f20f2\") " pod="service-telemetry/default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x" Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.911139 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t"] Jan 25 00:22:55 crc kubenswrapper[4985]: I0125 00:22:55.985022 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x" Jan 25 00:22:56 crc kubenswrapper[4985]: I0125 00:22:56.233445 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t" event={"ID":"0f416a9f-a874-47ae-b48b-b490193667c4","Type":"ContainerStarted","Data":"57187114dd4e33fa202d11798b29c964de3c13a5168e16ea2168fc082adcd513"} Jan 25 00:22:56 crc kubenswrapper[4985]: I0125 00:22:56.474892 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x"] Jan 25 00:22:56 crc kubenswrapper[4985]: W0125 00:22:56.563802 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod512b3aca_4b83_4d34_8401_a14ea99f20f2.slice/crio-b6f7916dd5cd36f57945daaa64ee22c2c0778c90859686ece4330414f841aa5e WatchSource:0}: Error finding container b6f7916dd5cd36f57945daaa64ee22c2c0778c90859686ece4330414f841aa5e: Status 404 returned error can't find the container with id b6f7916dd5cd36f57945daaa64ee22c2c0778c90859686ece4330414f841aa5e Jan 25 00:22:57 crc kubenswrapper[4985]: I0125 00:22:57.245585 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x" event={"ID":"512b3aca-4b83-4d34-8401-a14ea99f20f2","Type":"ContainerStarted","Data":"b6f7916dd5cd36f57945daaa64ee22c2c0778c90859686ece4330414f841aa5e"} Jan 25 00:22:57 crc kubenswrapper[4985]: I0125 00:22:57.252962 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"049acf2f-5927-42db-8380-a735fd0804b3","Type":"ContainerStarted","Data":"b69a97482c98809d9ee9f1fc7f47502cea38b96143edc4ee39579bb68e811c87"} Jan 25 00:23:00 crc kubenswrapper[4985]: I0125 00:23:00.166755 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="service-telemetry/prometheus-default-0" Jan 25 00:23:00 crc kubenswrapper[4985]: I0125 00:23:00.211015 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="service-telemetry/prometheus-default-0" Jan 25 00:23:00 crc kubenswrapper[4985]: I0125 00:23:00.310748 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="service-telemetry/prometheus-default-0" Jan 25 00:23:02 crc kubenswrapper[4985]: I0125 00:23:02.290720 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" event={"ID":"97e8c495-9a25-42da-a0ab-5a77667b4623","Type":"ContainerStarted","Data":"dcf938b2f942cf5cff4d37e27aa180ce42d9d9f2b37fdca9fd989ccfe7a51f0a"} Jan 25 00:23:02 crc kubenswrapper[4985]: I0125 00:23:02.297515 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/alertmanager-default-0" event={"ID":"049acf2f-5927-42db-8380-a735fd0804b3","Type":"ContainerStarted","Data":"48aefb49f0903d76f62a83fd91b6a65d7b79f74dd5dd516f85b5439e31895568"} Jan 25 00:23:02 crc kubenswrapper[4985]: I0125 00:23:02.300877 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" event={"ID":"bfe2073c-f3bf-42e3-8d47-394d4fc025d3","Type":"ContainerStarted","Data":"38537835350a7e3fc8cb1149f3614bcc90d53faa781a06c7d7918a70e830008f"} Jan 25 00:23:02 crc kubenswrapper[4985]: I0125 00:23:02.302367 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="service-telemetry/default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x" event={"ID":"512b3aca-4b83-4d34-8401-a14ea99f20f2","Type":"ContainerStarted","Data":"5a0e0200e70348e376e90a18edd351b6d8e6065c405f80db4bc65a57f59358d4"} Jan 25 00:23:02 crc kubenswrapper[4985]: I0125 00:23:02.303958 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" event={"ID":"75635f0e-e1c4-4a69-8893-5cabc8341db3","Type":"ContainerStarted","Data":"26f6364fe48fde502d6ba4ce7ab892e8ff754ba1707af49923fc257e39357a1e"} Jan 25 00:23:02 crc kubenswrapper[4985]: I0125 00:23:02.305442 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t" event={"ID":"0f416a9f-a874-47ae-b48b-b490193667c4","Type":"ContainerStarted","Data":"c9c6ab615a7caf654d2def94757d5da988c54b5e8e45bb178a633a74ee91a4c0"} Jan 25 00:23:02 crc kubenswrapper[4985]: I0125 00:23:02.337201 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/alertmanager-default-0" podStartSLOduration=27.578764869 podStartE2EDuration="37.337176717s" podCreationTimestamp="2026-01-25 00:22:25 +0000 UTC" firstStartedPulling="2026-01-25 00:22:52.206055368 +0000 UTC m=+982.237991641" lastFinishedPulling="2026-01-25 00:23:01.964467216 +0000 UTC m=+991.996403489" observedRunningTime="2026-01-25 00:23:02.330979289 +0000 UTC m=+992.362915572" watchObservedRunningTime="2026-01-25 00:23:02.337176717 +0000 UTC m=+992.369113020" Jan 25 00:23:05 crc kubenswrapper[4985]: I0125 00:23:05.836160 4985 patch_prober.go:28] interesting pod/machine-config-daemon-dddxc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 25 00:23:05 crc kubenswrapper[4985]: I0125 00:23:05.836945 4985 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 25 00:23:07 crc kubenswrapper[4985]: I0125 00:23:07.820930 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-mtmlh"] Jan 25 00:23:07 crc kubenswrapper[4985]: I0125 00:23:07.821310 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" podUID="63289c99-25e5-4cb9-a2d0-4faf94b64988" containerName="default-interconnect" containerID="cri-o://ab75526a2c85ca8716a94658f1ebca425fb484f567b0fa35d6e10d048711a537" gracePeriod=30 Jan 25 00:23:09 crc kubenswrapper[4985]: I0125 00:23:09.357879 4985 generic.go:334] "Generic (PLEG): container finished" podID="512b3aca-4b83-4d34-8401-a14ea99f20f2" containerID="5a0e0200e70348e376e90a18edd351b6d8e6065c405f80db4bc65a57f59358d4" exitCode=0 Jan 25 00:23:09 crc kubenswrapper[4985]: I0125 00:23:09.357977 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x" event={"ID":"512b3aca-4b83-4d34-8401-a14ea99f20f2","Type":"ContainerDied","Data":"5a0e0200e70348e376e90a18edd351b6d8e6065c405f80db4bc65a57f59358d4"} Jan 25 00:23:09 crc kubenswrapper[4985]: I0125 00:23:09.361454 4985 
generic.go:334] "Generic (PLEG): container finished" podID="75635f0e-e1c4-4a69-8893-5cabc8341db3" containerID="26f6364fe48fde502d6ba4ce7ab892e8ff754ba1707af49923fc257e39357a1e" exitCode=0 Jan 25 00:23:09 crc kubenswrapper[4985]: I0125 00:23:09.361506 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" event={"ID":"75635f0e-e1c4-4a69-8893-5cabc8341db3","Type":"ContainerDied","Data":"26f6364fe48fde502d6ba4ce7ab892e8ff754ba1707af49923fc257e39357a1e"} Jan 25 00:23:09 crc kubenswrapper[4985]: I0125 00:23:09.363164 4985 generic.go:334] "Generic (PLEG): container finished" podID="0f416a9f-a874-47ae-b48b-b490193667c4" containerID="c9c6ab615a7caf654d2def94757d5da988c54b5e8e45bb178a633a74ee91a4c0" exitCode=0 Jan 25 00:23:09 crc kubenswrapper[4985]: I0125 00:23:09.363211 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t" event={"ID":"0f416a9f-a874-47ae-b48b-b490193667c4","Type":"ContainerDied","Data":"c9c6ab615a7caf654d2def94757d5da988c54b5e8e45bb178a633a74ee91a4c0"} Jan 25 00:23:09 crc kubenswrapper[4985]: I0125 00:23:09.365399 4985 generic.go:334] "Generic (PLEG): container finished" podID="97e8c495-9a25-42da-a0ab-5a77667b4623" containerID="dcf938b2f942cf5cff4d37e27aa180ce42d9d9f2b37fdca9fd989ccfe7a51f0a" exitCode=0 Jan 25 00:23:09 crc kubenswrapper[4985]: I0125 00:23:09.365449 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" event={"ID":"97e8c495-9a25-42da-a0ab-5a77667b4623","Type":"ContainerDied","Data":"dcf938b2f942cf5cff4d37e27aa180ce42d9d9f2b37fdca9fd989ccfe7a51f0a"} Jan 25 00:23:09 crc kubenswrapper[4985]: I0125 00:23:09.372182 4985 generic.go:334] "Generic (PLEG): container finished" podID="bfe2073c-f3bf-42e3-8d47-394d4fc025d3" containerID="38537835350a7e3fc8cb1149f3614bcc90d53faa781a06c7d7918a70e830008f" exitCode=0 Jan 25 00:23:09 crc kubenswrapper[4985]: I0125 00:23:09.372293 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" event={"ID":"bfe2073c-f3bf-42e3-8d47-394d4fc025d3","Type":"ContainerDied","Data":"38537835350a7e3fc8cb1149f3614bcc90d53faa781a06c7d7918a70e830008f"} Jan 25 00:23:09 crc kubenswrapper[4985]: I0125 00:23:09.374560 4985 generic.go:334] "Generic (PLEG): container finished" podID="63289c99-25e5-4cb9-a2d0-4faf94b64988" containerID="ab75526a2c85ca8716a94658f1ebca425fb484f567b0fa35d6e10d048711a537" exitCode=0 Jan 25 00:23:09 crc kubenswrapper[4985]: I0125 00:23:09.374597 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" event={"ID":"63289c99-25e5-4cb9-a2d0-4faf94b64988","Type":"ContainerDied","Data":"ab75526a2c85ca8716a94658f1ebca425fb484f567b0fa35d6e10d048711a537"} Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.083900 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.114942 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-qlh7z"] Jan 25 00:23:10 crc kubenswrapper[4985]: E0125 00:23:10.115327 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="63289c99-25e5-4cb9-a2d0-4faf94b64988" containerName="default-interconnect" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.115341 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="63289c99-25e5-4cb9-a2d0-4faf94b64988" containerName="default-interconnect" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.115469 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="63289c99-25e5-4cb9-a2d0-4faf94b64988" containerName="default-interconnect" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.115929 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.140429 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-qlh7z"] Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.198050 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bx9c7\" (UniqueName: \"kubernetes.io/projected/63289c99-25e5-4cb9-a2d0-4faf94b64988-kube-api-access-bx9c7\") pod \"63289c99-25e5-4cb9-a2d0-4faf94b64988\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.199315 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-default-interconnect-openstack-ca\") pod \"63289c99-25e5-4cb9-a2d0-4faf94b64988\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.199450 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/63289c99-25e5-4cb9-a2d0-4faf94b64988-sasl-config\") pod \"63289c99-25e5-4cb9-a2d0-4faf94b64988\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.199503 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-default-interconnect-openstack-credentials\") pod \"63289c99-25e5-4cb9-a2d0-4faf94b64988\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.199642 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-default-interconnect-inter-router-credentials\") pod \"63289c99-25e5-4cb9-a2d0-4faf94b64988\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.199794 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-sasl-users\") pod \"63289c99-25e5-4cb9-a2d0-4faf94b64988\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " Jan 25 00:23:10 crc 
kubenswrapper[4985]: I0125 00:23:10.200365 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/63289c99-25e5-4cb9-a2d0-4faf94b64988-sasl-config" (OuterVolumeSpecName: "sasl-config") pod "63289c99-25e5-4cb9-a2d0-4faf94b64988" (UID: "63289c99-25e5-4cb9-a2d0-4faf94b64988"). InnerVolumeSpecName "sasl-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.200697 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-default-interconnect-inter-router-ca\") pod \"63289c99-25e5-4cb9-a2d0-4faf94b64988\" (UID: \"63289c99-25e5-4cb9-a2d0-4faf94b64988\") " Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.200989 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-qlh7z\" (UID: \"9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77\") " pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.201020 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-qlh7z\" (UID: \"9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77\") " pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.201066 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77-sasl-users\") pod \"default-interconnect-68864d46cb-qlh7z\" (UID: \"9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77\") " pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.201089 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-qlh7z\" (UID: \"9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77\") " pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.201144 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77-sasl-config\") pod \"default-interconnect-68864d46cb-qlh7z\" (UID: \"9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77\") " pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.201321 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czmcf\" (UniqueName: \"kubernetes.io/projected/9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77-kube-api-access-czmcf\") pod \"default-interconnect-68864d46cb-qlh7z\" (UID: \"9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77\") " 
pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.201489 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-qlh7z\" (UID: \"9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77\") " pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.201636 4985 reconciler_common.go:293] "Volume detached for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/63289c99-25e5-4cb9-a2d0-4faf94b64988-sasl-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.205546 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/63289c99-25e5-4cb9-a2d0-4faf94b64988-kube-api-access-bx9c7" (OuterVolumeSpecName: "kube-api-access-bx9c7") pod "63289c99-25e5-4cb9-a2d0-4faf94b64988" (UID: "63289c99-25e5-4cb9-a2d0-4faf94b64988"). InnerVolumeSpecName "kube-api-access-bx9c7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.205639 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-sasl-users" (OuterVolumeSpecName: "sasl-users") pod "63289c99-25e5-4cb9-a2d0-4faf94b64988" (UID: "63289c99-25e5-4cb9-a2d0-4faf94b64988"). InnerVolumeSpecName "sasl-users". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.206418 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-default-interconnect-openstack-credentials" (OuterVolumeSpecName: "default-interconnect-openstack-credentials") pod "63289c99-25e5-4cb9-a2d0-4faf94b64988" (UID: "63289c99-25e5-4cb9-a2d0-4faf94b64988"). InnerVolumeSpecName "default-interconnect-openstack-credentials". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.207519 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-default-interconnect-openstack-ca" (OuterVolumeSpecName: "default-interconnect-openstack-ca") pod "63289c99-25e5-4cb9-a2d0-4faf94b64988" (UID: "63289c99-25e5-4cb9-a2d0-4faf94b64988"). InnerVolumeSpecName "default-interconnect-openstack-ca". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.209206 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-default-interconnect-inter-router-ca" (OuterVolumeSpecName: "default-interconnect-inter-router-ca") pod "63289c99-25e5-4cb9-a2d0-4faf94b64988" (UID: "63289c99-25e5-4cb9-a2d0-4faf94b64988"). InnerVolumeSpecName "default-interconnect-inter-router-ca". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.227565 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-default-interconnect-inter-router-credentials" (OuterVolumeSpecName: "default-interconnect-inter-router-credentials") pod "63289c99-25e5-4cb9-a2d0-4faf94b64988" (UID: "63289c99-25e5-4cb9-a2d0-4faf94b64988"). InnerVolumeSpecName "default-interconnect-inter-router-credentials". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.302613 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-qlh7z\" (UID: \"9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77\") " pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.302653 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77-default-interconnect-openstack-credentials\") pod \"default-interconnect-68864d46cb-qlh7z\" (UID: \"9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77\") " pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.302692 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77-sasl-users\") pod \"default-interconnect-68864d46cb-qlh7z\" (UID: \"9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77\") " pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.302714 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-qlh7z\" (UID: \"9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77\") " pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.302744 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77-sasl-config\") pod \"default-interconnect-68864d46cb-qlh7z\" (UID: \"9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77\") " pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.302772 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czmcf\" (UniqueName: \"kubernetes.io/projected/9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77-kube-api-access-czmcf\") pod \"default-interconnect-68864d46cb-qlh7z\" (UID: \"9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77\") " pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.302813 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77-default-interconnect-inter-router-ca\") pod 
\"default-interconnect-68864d46cb-qlh7z\" (UID: \"9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77\") " pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.302863 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bx9c7\" (UniqueName: \"kubernetes.io/projected/63289c99-25e5-4cb9-a2d0-4faf94b64988-kube-api-access-bx9c7\") on node \"crc\" DevicePath \"\"" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.302875 4985 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-default-interconnect-openstack-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.302885 4985 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-default-interconnect-openstack-credentials\") on node \"crc\" DevicePath \"\"" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.302895 4985 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-default-interconnect-inter-router-credentials\") on node \"crc\" DevicePath \"\"" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.302904 4985 reconciler_common.go:293] "Volume detached for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-default-interconnect-inter-router-ca\") on node \"crc\" DevicePath \"\"" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.302914 4985 reconciler_common.go:293] "Volume detached for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/63289c99-25e5-4cb9-a2d0-4faf94b64988-sasl-users\") on node \"crc\" DevicePath \"\"" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.304440 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-config\" (UniqueName: \"kubernetes.io/configmap/9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77-sasl-config\") pod \"default-interconnect-68864d46cb-qlh7z\" (UID: \"9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77\") " pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.306544 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sasl-users\" (UniqueName: \"kubernetes.io/secret/9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77-sasl-users\") pod \"default-interconnect-68864d46cb-qlh7z\" (UID: \"9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77\") " pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.307152 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-credentials\" (UniqueName: \"kubernetes.io/secret/9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77-default-interconnect-inter-router-credentials\") pod \"default-interconnect-68864d46cb-qlh7z\" (UID: \"9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77\") " pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.308156 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-credentials\" (UniqueName: \"kubernetes.io/secret/9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77-default-interconnect-openstack-credentials\") pod 
\"default-interconnect-68864d46cb-qlh7z\" (UID: \"9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77\") " pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.310675 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-inter-router-ca\" (UniqueName: \"kubernetes.io/secret/9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77-default-interconnect-inter-router-ca\") pod \"default-interconnect-68864d46cb-qlh7z\" (UID: \"9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77\") " pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.315533 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-openstack-ca\" (UniqueName: \"kubernetes.io/secret/9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77-default-interconnect-openstack-ca\") pod \"default-interconnect-68864d46cb-qlh7z\" (UID: \"9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77\") " pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.318675 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czmcf\" (UniqueName: \"kubernetes.io/projected/9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77-kube-api-access-czmcf\") pod \"default-interconnect-68864d46cb-qlh7z\" (UID: \"9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77\") " pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.384460 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" event={"ID":"63289c99-25e5-4cb9-a2d0-4faf94b64988","Type":"ContainerDied","Data":"3b2dbd375684520c07539667350f58f25afec3c2f25e9bb4c0f6de6fec304afd"} Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.384509 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-mtmlh" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.384515 4985 scope.go:117] "RemoveContainer" containerID="ab75526a2c85ca8716a94658f1ebca425fb484f567b0fa35d6e10d048711a537" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.401147 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-mtmlh"] Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.405445 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-mtmlh"] Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.456973 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" Jan 25 00:23:10 crc kubenswrapper[4985]: I0125 00:23:10.685994 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/default-interconnect-68864d46cb-qlh7z"] Jan 25 00:23:12 crc kubenswrapper[4985]: I0125 00:23:12.283470 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="63289c99-25e5-4cb9-a2d0-4faf94b64988" path="/var/lib/kubelet/pods/63289c99-25e5-4cb9-a2d0-4faf94b64988/volumes" Jan 25 00:23:13 crc kubenswrapper[4985]: I0125 00:23:13.416083 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" event={"ID":"9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77","Type":"ContainerStarted","Data":"f11f621404908dc5a114860a53175f9a8957e2fb35bc2cab57d13cfdbb571861"} Jan 25 00:23:14 crc kubenswrapper[4985]: I0125 00:23:14.433681 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" event={"ID":"bfe2073c-f3bf-42e3-8d47-394d4fc025d3","Type":"ContainerStarted","Data":"f65dd72cebbd701951e2cc5cd825df2a12160ce27e53be9c68ee19b2d222a192"} Jan 25 00:23:14 crc kubenswrapper[4985]: I0125 00:23:14.437064 4985 scope.go:117] "RemoveContainer" containerID="38537835350a7e3fc8cb1149f3614bcc90d53faa781a06c7d7918a70e830008f" Jan 25 00:23:14 crc kubenswrapper[4985]: I0125 00:23:14.437278 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x" event={"ID":"512b3aca-4b83-4d34-8401-a14ea99f20f2","Type":"ContainerStarted","Data":"60e6b310d53ea73ad320e690fdac26eb4257ae10b8745fc0bd85bca81aef4c25"} Jan 25 00:23:14 crc kubenswrapper[4985]: I0125 00:23:14.437822 4985 scope.go:117] "RemoveContainer" containerID="5a0e0200e70348e376e90a18edd351b6d8e6065c405f80db4bc65a57f59358d4" Jan 25 00:23:14 crc kubenswrapper[4985]: I0125 00:23:14.445629 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" event={"ID":"75635f0e-e1c4-4a69-8893-5cabc8341db3","Type":"ContainerStarted","Data":"196ae5e8bdb03c17e916ecb2e8bec6ee88661cf44704a9aaa3bef4b90a2df053"} Jan 25 00:23:14 crc kubenswrapper[4985]: I0125 00:23:14.446437 4985 scope.go:117] "RemoveContainer" containerID="26f6364fe48fde502d6ba4ce7ab892e8ff754ba1707af49923fc257e39357a1e" Jan 25 00:23:14 crc kubenswrapper[4985]: I0125 00:23:14.455499 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t" event={"ID":"0f416a9f-a874-47ae-b48b-b490193667c4","Type":"ContainerStarted","Data":"dd00ee0b6c16a1cbdcc09f74190465c871963c973ae55425b7590be172e18e16"} Jan 25 00:23:14 crc kubenswrapper[4985]: I0125 00:23:14.455953 4985 scope.go:117] "RemoveContainer" containerID="c9c6ab615a7caf654d2def94757d5da988c54b5e8e45bb178a633a74ee91a4c0" Jan 25 00:23:14 crc kubenswrapper[4985]: I0125 00:23:14.485717 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" event={"ID":"97e8c495-9a25-42da-a0ab-5a77667b4623","Type":"ContainerStarted","Data":"40dbf278d98c39fb4b8da90f466600ec6d823821df7c8cef7851c886a2a5e6f7"} Jan 25 00:23:14 crc kubenswrapper[4985]: I0125 00:23:14.486681 4985 scope.go:117] "RemoveContainer" containerID="dcf938b2f942cf5cff4d37e27aa180ce42d9d9f2b37fdca9fd989ccfe7a51f0a" Jan 25 00:23:14 crc kubenswrapper[4985]: I0125 
00:23:14.491343 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" event={"ID":"9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77","Type":"ContainerStarted","Data":"2042ba21d9512ab70c7cba1f29327f124171d452fd6eb8ffbd0702e71cb6470e"} Jan 25 00:23:14 crc kubenswrapper[4985]: I0125 00:23:14.593333 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-interconnect-68864d46cb-qlh7z" podStartSLOduration=7.593312951 podStartE2EDuration="7.593312951s" podCreationTimestamp="2026-01-25 00:23:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:23:14.586128957 +0000 UTC m=+1004.618065260" watchObservedRunningTime="2026-01-25 00:23:14.593312951 +0000 UTC m=+1004.625249234" Jan 25 00:23:15 crc kubenswrapper[4985]: I0125 00:23:15.503073 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" event={"ID":"97e8c495-9a25-42da-a0ab-5a77667b4623","Type":"ContainerStarted","Data":"34dce53a7b2811b76a10827ed9ca5466ad243d5ec0b42c107e080194f072323e"} Jan 25 00:23:15 crc kubenswrapper[4985]: I0125 00:23:15.507905 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" event={"ID":"bfe2073c-f3bf-42e3-8d47-394d4fc025d3","Type":"ContainerStarted","Data":"a77b7a20941b87fa284a465f9d5b6e29305fe28c50020c5ae783799ee251540f"} Jan 25 00:23:15 crc kubenswrapper[4985]: I0125 00:23:15.518430 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x" event={"ID":"512b3aca-4b83-4d34-8401-a14ea99f20f2","Type":"ContainerStarted","Data":"9037d472b4b01843143dc31f4a69eadd0227417e2f0efea6b5b680ec09811036"} Jan 25 00:23:15 crc kubenswrapper[4985]: I0125 00:23:15.523711 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" event={"ID":"75635f0e-e1c4-4a69-8893-5cabc8341db3","Type":"ContainerStarted","Data":"5f8bda7ac798831a8d4115005418de89b0f2b7a1b993bc7426398fa09902f656"} Jan 25 00:23:15 crc kubenswrapper[4985]: I0125 00:23:15.527870 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t" event={"ID":"0f416a9f-a874-47ae-b48b-b490193667c4","Type":"ContainerStarted","Data":"fe979f4ab170a43e9112b363204df991d3faf0741776a889cd00e16913d47633"} Jan 25 00:23:15 crc kubenswrapper[4985]: I0125 00:23:15.530406 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq" podStartSLOduration=8.122638935 podStartE2EDuration="31.530379862s" podCreationTimestamp="2026-01-25 00:22:44 +0000 UTC" firstStartedPulling="2026-01-25 00:22:51.560343491 +0000 UTC m=+981.592279764" lastFinishedPulling="2026-01-25 00:23:14.968084418 +0000 UTC m=+1005.000020691" observedRunningTime="2026-01-25 00:23:15.52293331 +0000 UTC m=+1005.554869653" watchObservedRunningTime="2026-01-25 00:23:15.530379862 +0000 UTC m=+1005.562316155" Jan 25 00:23:15 crc kubenswrapper[4985]: I0125 00:23:15.557849 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x" podStartSLOduration=2.240331874 
podStartE2EDuration="20.557823147s" podCreationTimestamp="2026-01-25 00:22:55 +0000 UTC" firstStartedPulling="2026-01-25 00:22:56.566305809 +0000 UTC m=+986.598242082" lastFinishedPulling="2026-01-25 00:23:14.883797082 +0000 UTC m=+1004.915733355" observedRunningTime="2026-01-25 00:23:15.548758801 +0000 UTC m=+1005.580695094" watchObservedRunningTime="2026-01-25 00:23:15.557823147 +0000 UTC m=+1005.589759440" Jan 25 00:23:15 crc kubenswrapper[4985]: I0125 00:23:15.594968 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg" podStartSLOduration=5.145081353 podStartE2EDuration="28.594947853s" podCreationTimestamp="2026-01-25 00:22:47 +0000 UTC" firstStartedPulling="2026-01-25 00:22:51.508796783 +0000 UTC m=+981.540733056" lastFinishedPulling="2026-01-25 00:23:14.958663283 +0000 UTC m=+1004.990599556" observedRunningTime="2026-01-25 00:23:15.592320512 +0000 UTC m=+1005.624256795" watchObservedRunningTime="2026-01-25 00:23:15.594947853 +0000 UTC m=+1005.626884146" Jan 25 00:23:15 crc kubenswrapper[4985]: I0125 00:23:15.596930 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46" podStartSLOduration=3.872816258 podStartE2EDuration="34.596918817s" podCreationTimestamp="2026-01-25 00:22:41 +0000 UTC" firstStartedPulling="2026-01-25 00:22:44.22815398 +0000 UTC m=+974.260090253" lastFinishedPulling="2026-01-25 00:23:14.952256499 +0000 UTC m=+1004.984192812" observedRunningTime="2026-01-25 00:23:15.571636671 +0000 UTC m=+1005.603572964" watchObservedRunningTime="2026-01-25 00:23:15.596918817 +0000 UTC m=+1005.628855090" Jan 25 00:23:26 crc kubenswrapper[4985]: I0125 00:23:26.251371 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t" podStartSLOduration=13.228889557 podStartE2EDuration="32.251341864s" podCreationTimestamp="2026-01-25 00:22:54 +0000 UTC" firstStartedPulling="2026-01-25 00:22:55.929889125 +0000 UTC m=+985.961825388" lastFinishedPulling="2026-01-25 00:23:14.952341382 +0000 UTC m=+1004.984277695" observedRunningTime="2026-01-25 00:23:15.611571774 +0000 UTC m=+1005.643508068" watchObservedRunningTime="2026-01-25 00:23:26.251341864 +0000 UTC m=+1016.283278217" Jan 25 00:23:26 crc kubenswrapper[4985]: I0125 00:23:26.252411 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/qdr-test"] Jan 25 00:23:26 crc kubenswrapper[4985]: I0125 00:23:26.254005 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/qdr-test" Jan 25 00:23:26 crc kubenswrapper[4985]: I0125 00:23:26.258077 4985 reflector.go:368] Caches populated for *v1.Secret from object-"service-telemetry"/"default-interconnect-selfsigned" Jan 25 00:23:26 crc kubenswrapper[4985]: I0125 00:23:26.258419 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"qdr-test-config" Jan 25 00:23:26 crc kubenswrapper[4985]: I0125 00:23:26.309319 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/qdr-test"] Jan 25 00:23:26 crc kubenswrapper[4985]: I0125 00:23:26.418736 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjqb7\" (UniqueName: \"kubernetes.io/projected/dab93b1f-04e3-46bc-ae57-9e38a3ab3c71-kube-api-access-sjqb7\") pod \"qdr-test\" (UID: \"dab93b1f-04e3-46bc-ae57-9e38a3ab3c71\") " pod="service-telemetry/qdr-test" Jan 25 00:23:26 crc kubenswrapper[4985]: I0125 00:23:26.418928 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/dab93b1f-04e3-46bc-ae57-9e38a3ab3c71-qdr-test-config\") pod \"qdr-test\" (UID: \"dab93b1f-04e3-46bc-ae57-9e38a3ab3c71\") " pod="service-telemetry/qdr-test" Jan 25 00:23:26 crc kubenswrapper[4985]: I0125 00:23:26.418957 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/dab93b1f-04e3-46bc-ae57-9e38a3ab3c71-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: \"dab93b1f-04e3-46bc-ae57-9e38a3ab3c71\") " pod="service-telemetry/qdr-test" Jan 25 00:23:26 crc kubenswrapper[4985]: I0125 00:23:26.521134 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjqb7\" (UniqueName: \"kubernetes.io/projected/dab93b1f-04e3-46bc-ae57-9e38a3ab3c71-kube-api-access-sjqb7\") pod \"qdr-test\" (UID: \"dab93b1f-04e3-46bc-ae57-9e38a3ab3c71\") " pod="service-telemetry/qdr-test" Jan 25 00:23:26 crc kubenswrapper[4985]: I0125 00:23:26.521605 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/dab93b1f-04e3-46bc-ae57-9e38a3ab3c71-qdr-test-config\") pod \"qdr-test\" (UID: \"dab93b1f-04e3-46bc-ae57-9e38a3ab3c71\") " pod="service-telemetry/qdr-test" Jan 25 00:23:26 crc kubenswrapper[4985]: I0125 00:23:26.521752 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/dab93b1f-04e3-46bc-ae57-9e38a3ab3c71-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: \"dab93b1f-04e3-46bc-ae57-9e38a3ab3c71\") " pod="service-telemetry/qdr-test" Jan 25 00:23:26 crc kubenswrapper[4985]: I0125 00:23:26.522687 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"qdr-test-config\" (UniqueName: \"kubernetes.io/configmap/dab93b1f-04e3-46bc-ae57-9e38a3ab3c71-qdr-test-config\") pod \"qdr-test\" (UID: \"dab93b1f-04e3-46bc-ae57-9e38a3ab3c71\") " pod="service-telemetry/qdr-test" Jan 25 00:23:26 crc kubenswrapper[4985]: I0125 00:23:26.528889 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-interconnect-selfsigned-cert\" (UniqueName: \"kubernetes.io/secret/dab93b1f-04e3-46bc-ae57-9e38a3ab3c71-default-interconnect-selfsigned-cert\") pod \"qdr-test\" (UID: 
\"dab93b1f-04e3-46bc-ae57-9e38a3ab3c71\") " pod="service-telemetry/qdr-test" Jan 25 00:23:26 crc kubenswrapper[4985]: I0125 00:23:26.550545 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjqb7\" (UniqueName: \"kubernetes.io/projected/dab93b1f-04e3-46bc-ae57-9e38a3ab3c71-kube-api-access-sjqb7\") pod \"qdr-test\" (UID: \"dab93b1f-04e3-46bc-ae57-9e38a3ab3c71\") " pod="service-telemetry/qdr-test" Jan 25 00:23:26 crc kubenswrapper[4985]: I0125 00:23:26.601335 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/qdr-test" Jan 25 00:23:27 crc kubenswrapper[4985]: I0125 00:23:27.051556 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/qdr-test"] Jan 25 00:23:27 crc kubenswrapper[4985]: W0125 00:23:27.059150 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poddab93b1f_04e3_46bc_ae57_9e38a3ab3c71.slice/crio-09aa4d79dcf538101087cbcc39bad0f8bc7cd85c0d663f4101e4d349a292897a WatchSource:0}: Error finding container 09aa4d79dcf538101087cbcc39bad0f8bc7cd85c0d663f4101e4d349a292897a: Status 404 returned error can't find the container with id 09aa4d79dcf538101087cbcc39bad0f8bc7cd85c0d663f4101e4d349a292897a Jan 25 00:23:27 crc kubenswrapper[4985]: I0125 00:23:27.061660 4985 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 25 00:23:27 crc kubenswrapper[4985]: I0125 00:23:27.734928 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/qdr-test" event={"ID":"dab93b1f-04e3-46bc-ae57-9e38a3ab3c71","Type":"ContainerStarted","Data":"09aa4d79dcf538101087cbcc39bad0f8bc7cd85c0d663f4101e4d349a292897a"} Jan 25 00:23:35 crc kubenswrapper[4985]: I0125 00:23:35.836698 4985 patch_prober.go:28] interesting pod/machine-config-daemon-dddxc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 25 00:23:35 crc kubenswrapper[4985]: I0125 00:23:35.838330 4985 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 25 00:23:38 crc kubenswrapper[4985]: I0125 00:23:38.826096 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/qdr-test" event={"ID":"dab93b1f-04e3-46bc-ae57-9e38a3ab3c71","Type":"ContainerStarted","Data":"365ec39abda81a2db30d95c25f7d66d21dcd7c75c7b01db262aa197746a0ec87"} Jan 25 00:23:38 crc kubenswrapper[4985]: I0125 00:23:38.872144 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/qdr-test" podStartSLOduration=1.895717281 podStartE2EDuration="12.872085189s" podCreationTimestamp="2026-01-25 00:23:26 +0000 UTC" firstStartedPulling="2026-01-25 00:23:27.061381198 +0000 UTC m=+1017.093317481" lastFinishedPulling="2026-01-25 00:23:38.037749116 +0000 UTC m=+1028.069685389" observedRunningTime="2026-01-25 00:23:38.851537412 +0000 UTC m=+1028.883473695" watchObservedRunningTime="2026-01-25 00:23:38.872085189 +0000 UTC m=+1028.904021502" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.191855 4985 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["service-telemetry/stf-smoketest-smoke1-zdhps"] Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.193031 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-zdhps" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.195378 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-entrypoint-script" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.195428 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-config" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.195388 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-ceilometer-publisher" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.195894 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-sensubility-config" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.196079 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-collectd-entrypoint-script" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.196181 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"service-telemetry"/"stf-smoketest-healthcheck-log" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.202592 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-zdhps"] Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.246974 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-sensubility-config\") pod \"stf-smoketest-smoke1-zdhps\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " pod="service-telemetry/stf-smoketest-smoke1-zdhps" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.247016 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-zdhps\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " pod="service-telemetry/stf-smoketest-smoke1-zdhps" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.247040 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-ceilometer-publisher\") pod \"stf-smoketest-smoke1-zdhps\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " pod="service-telemetry/stf-smoketest-smoke1-zdhps" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.247080 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-zdhps\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " pod="service-telemetry/stf-smoketest-smoke1-zdhps" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.247098 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-healthcheck-log\") 
pod \"stf-smoketest-smoke1-zdhps\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " pod="service-telemetry/stf-smoketest-smoke1-zdhps" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.247221 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-collectd-config\") pod \"stf-smoketest-smoke1-zdhps\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " pod="service-telemetry/stf-smoketest-smoke1-zdhps" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.247305 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tdbh\" (UniqueName: \"kubernetes.io/projected/84287a5e-f4fe-41ab-8081-38f70043e8e5-kube-api-access-4tdbh\") pod \"stf-smoketest-smoke1-zdhps\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " pod="service-telemetry/stf-smoketest-smoke1-zdhps" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.349253 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-sensubility-config\") pod \"stf-smoketest-smoke1-zdhps\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " pod="service-telemetry/stf-smoketest-smoke1-zdhps" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.349310 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-zdhps\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " pod="service-telemetry/stf-smoketest-smoke1-zdhps" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.349345 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-ceilometer-publisher\") pod \"stf-smoketest-smoke1-zdhps\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " pod="service-telemetry/stf-smoketest-smoke1-zdhps" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.349432 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-zdhps\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " pod="service-telemetry/stf-smoketest-smoke1-zdhps" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.349457 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-healthcheck-log\") pod \"stf-smoketest-smoke1-zdhps\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " pod="service-telemetry/stf-smoketest-smoke1-zdhps" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.349486 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-collectd-config\") pod \"stf-smoketest-smoke1-zdhps\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " pod="service-telemetry/stf-smoketest-smoke1-zdhps" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.351232 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-healthcheck-log\") pod \"stf-smoketest-smoke1-zdhps\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " pod="service-telemetry/stf-smoketest-smoke1-zdhps" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.351256 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-collectd-config\") pod \"stf-smoketest-smoke1-zdhps\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " pod="service-telemetry/stf-smoketest-smoke1-zdhps" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.351301 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tdbh\" (UniqueName: \"kubernetes.io/projected/84287a5e-f4fe-41ab-8081-38f70043e8e5-kube-api-access-4tdbh\") pod \"stf-smoketest-smoke1-zdhps\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " pod="service-telemetry/stf-smoketest-smoke1-zdhps" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.351834 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-ceilometer-publisher\") pod \"stf-smoketest-smoke1-zdhps\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " pod="service-telemetry/stf-smoketest-smoke1-zdhps" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.351887 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-ceilometer-entrypoint-script\") pod \"stf-smoketest-smoke1-zdhps\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " pod="service-telemetry/stf-smoketest-smoke1-zdhps" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.352078 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-sensubility-config\") pod \"stf-smoketest-smoke1-zdhps\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " pod="service-telemetry/stf-smoketest-smoke1-zdhps" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.352367 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-collectd-entrypoint-script\") pod \"stf-smoketest-smoke1-zdhps\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " pod="service-telemetry/stf-smoketest-smoke1-zdhps" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.381338 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tdbh\" (UniqueName: \"kubernetes.io/projected/84287a5e-f4fe-41ab-8081-38f70043e8e5-kube-api-access-4tdbh\") pod \"stf-smoketest-smoke1-zdhps\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " pod="service-telemetry/stf-smoketest-smoke1-zdhps" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.510017 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-zdhps" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.657787 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["service-telemetry/curl"] Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.658946 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/curl" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.666079 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/curl"] Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.757918 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nkl6\" (UniqueName: \"kubernetes.io/projected/b296b1e3-bf25-42c1-92f1-b9d45a31b705-kube-api-access-5nkl6\") pod \"curl\" (UID: \"b296b1e3-bf25-42c1-92f1-b9d45a31b705\") " pod="service-telemetry/curl" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.859006 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nkl6\" (UniqueName: \"kubernetes.io/projected/b296b1e3-bf25-42c1-92f1-b9d45a31b705-kube-api-access-5nkl6\") pod \"curl\" (UID: \"b296b1e3-bf25-42c1-92f1-b9d45a31b705\") " pod="service-telemetry/curl" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.898664 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nkl6\" (UniqueName: \"kubernetes.io/projected/b296b1e3-bf25-42c1-92f1-b9d45a31b705-kube-api-access-5nkl6\") pod \"curl\" (UID: \"b296b1e3-bf25-42c1-92f1-b9d45a31b705\") " pod="service-telemetry/curl" Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.960006 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/stf-smoketest-smoke1-zdhps"] Jan 25 00:23:39 crc kubenswrapper[4985]: I0125 00:23:39.986075 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="service-telemetry/curl" Jan 25 00:23:40 crc kubenswrapper[4985]: I0125 00:23:40.197779 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["service-telemetry/curl"] Jan 25 00:23:40 crc kubenswrapper[4985]: W0125 00:23:40.204249 4985 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb296b1e3_bf25_42c1_92f1_b9d45a31b705.slice/crio-a2b30c598a540914c0e774e02c471eaa9c76e77437f2904b39647a6f42aaa9f8 WatchSource:0}: Error finding container a2b30c598a540914c0e774e02c471eaa9c76e77437f2904b39647a6f42aaa9f8: Status 404 returned error can't find the container with id a2b30c598a540914c0e774e02c471eaa9c76e77437f2904b39647a6f42aaa9f8 Jan 25 00:23:40 crc kubenswrapper[4985]: I0125 00:23:40.844582 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"b296b1e3-bf25-42c1-92f1-b9d45a31b705","Type":"ContainerStarted","Data":"a2b30c598a540914c0e774e02c471eaa9c76e77437f2904b39647a6f42aaa9f8"} Jan 25 00:23:40 crc kubenswrapper[4985]: I0125 00:23:40.845965 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-zdhps" event={"ID":"84287a5e-f4fe-41ab-8081-38f70043e8e5","Type":"ContainerStarted","Data":"9f2079e0c41313404090d339c1859146503e0124e0307f3bd6d84a3b99996a8d"} Jan 25 00:23:42 crc kubenswrapper[4985]: I0125 00:23:42.903084 4985 generic.go:334] "Generic (PLEG): container finished" podID="b296b1e3-bf25-42c1-92f1-b9d45a31b705" containerID="1bcff95152c71f888b43d5ed4c86628ab38c39549eb4948a84cd75ddd6f67202" exitCode=0 Jan 25 00:23:42 crc kubenswrapper[4985]: I0125 00:23:42.903351 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"b296b1e3-bf25-42c1-92f1-b9d45a31b705","Type":"ContainerDied","Data":"1bcff95152c71f888b43d5ed4c86628ab38c39549eb4948a84cd75ddd6f67202"} Jan 25 00:23:44 crc kubenswrapper[4985]: I0125 
00:23:44.180802 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="service-telemetry/curl" Jan 25 00:23:44 crc kubenswrapper[4985]: I0125 00:23:44.325465 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5nkl6\" (UniqueName: \"kubernetes.io/projected/b296b1e3-bf25-42c1-92f1-b9d45a31b705-kube-api-access-5nkl6\") pod \"b296b1e3-bf25-42c1-92f1-b9d45a31b705\" (UID: \"b296b1e3-bf25-42c1-92f1-b9d45a31b705\") " Jan 25 00:23:44 crc kubenswrapper[4985]: I0125 00:23:44.341202 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b296b1e3-bf25-42c1-92f1-b9d45a31b705-kube-api-access-5nkl6" (OuterVolumeSpecName: "kube-api-access-5nkl6") pod "b296b1e3-bf25-42c1-92f1-b9d45a31b705" (UID: "b296b1e3-bf25-42c1-92f1-b9d45a31b705"). InnerVolumeSpecName "kube-api-access-5nkl6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:23:44 crc kubenswrapper[4985]: I0125 00:23:44.360770 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_curl_b296b1e3-bf25-42c1-92f1-b9d45a31b705/curl/0.log" Jan 25 00:23:44 crc kubenswrapper[4985]: I0125 00:23:44.428226 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5nkl6\" (UniqueName: \"kubernetes.io/projected/b296b1e3-bf25-42c1-92f1-b9d45a31b705-kube-api-access-5nkl6\") on node \"crc\" DevicePath \"\"" Jan 25 00:23:44 crc kubenswrapper[4985]: I0125 00:23:44.676393 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-78bcbbdcff-kbwgq_81a908e4-c470-4f1b-9a45-6a7d339c8749/prometheus-webhook-snmp/0.log" Jan 25 00:23:44 crc kubenswrapper[4985]: I0125 00:23:44.918297 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/curl" event={"ID":"b296b1e3-bf25-42c1-92f1-b9d45a31b705","Type":"ContainerDied","Data":"a2b30c598a540914c0e774e02c471eaa9c76e77437f2904b39647a6f42aaa9f8"} Jan 25 00:23:44 crc kubenswrapper[4985]: I0125 00:23:44.918331 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/curl" Jan 25 00:23:44 crc kubenswrapper[4985]: I0125 00:23:44.918339 4985 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a2b30c598a540914c0e774e02c471eaa9c76e77437f2904b39647a6f42aaa9f8" Jan 25 00:23:57 crc kubenswrapper[4985]: I0125 00:23:57.048860 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-zdhps" event={"ID":"84287a5e-f4fe-41ab-8081-38f70043e8e5","Type":"ContainerStarted","Data":"0e2ae4e35751b4477b332e029e0b21a39b594a37b2bc0ea2c22ca60c323dabb3"} Jan 25 00:24:05 crc kubenswrapper[4985]: I0125 00:24:05.836856 4985 patch_prober.go:28] interesting pod/machine-config-daemon-dddxc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 25 00:24:05 crc kubenswrapper[4985]: I0125 00:24:05.837497 4985 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 25 00:24:05 crc kubenswrapper[4985]: I0125 00:24:05.837549 4985 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" Jan 25 00:24:05 crc kubenswrapper[4985]: I0125 00:24:05.838257 4985 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"62dc661fb816fd2fd6357957822aa99e2fb982982064c42d0d36bc76cfe09d3d"} pod="openshift-machine-config-operator/machine-config-daemon-dddxc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 25 00:24:05 crc kubenswrapper[4985]: I0125 00:24:05.838321 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" containerID="cri-o://62dc661fb816fd2fd6357957822aa99e2fb982982064c42d0d36bc76cfe09d3d" gracePeriod=600 Jan 25 00:24:06 crc kubenswrapper[4985]: I0125 00:24:06.112638 4985 generic.go:334] "Generic (PLEG): container finished" podID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerID="62dc661fb816fd2fd6357957822aa99e2fb982982064c42d0d36bc76cfe09d3d" exitCode=0 Jan 25 00:24:06 crc kubenswrapper[4985]: I0125 00:24:06.112814 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" event={"ID":"5fa83abe-5c61-40a5-bf77-d8f929bdda78","Type":"ContainerDied","Data":"62dc661fb816fd2fd6357957822aa99e2fb982982064c42d0d36bc76cfe09d3d"} Jan 25 00:24:06 crc kubenswrapper[4985]: I0125 00:24:06.113047 4985 scope.go:117] "RemoveContainer" containerID="9754fcac108cedae18ecde93349a9806fb16716055497b233b48c38927bdac01" Jan 25 00:24:06 crc kubenswrapper[4985]: I0125 00:24:06.115264 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-zdhps" event={"ID":"84287a5e-f4fe-41ab-8081-38f70043e8e5","Type":"ContainerStarted","Data":"f3a23dbf9a4eaf10e3c9ef487e8bc7213d4468efe8e369bda8ca1658b0196334"} Jan 25 00:24:06 crc kubenswrapper[4985]: I0125 00:24:06.133412 4985 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="service-telemetry/stf-smoketest-smoke1-zdhps" podStartSLOduration=1.3265526109999999 podStartE2EDuration="27.133385476s" podCreationTimestamp="2026-01-25 00:23:39 +0000 UTC" firstStartedPulling="2026-01-25 00:23:39.966427316 +0000 UTC m=+1029.998363589" lastFinishedPulling="2026-01-25 00:24:05.773260141 +0000 UTC m=+1055.805196454" observedRunningTime="2026-01-25 00:24:06.132300109 +0000 UTC m=+1056.164236422" watchObservedRunningTime="2026-01-25 00:24:06.133385476 +0000 UTC m=+1056.165321779" Jan 25 00:24:07 crc kubenswrapper[4985]: I0125 00:24:07.124474 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" event={"ID":"5fa83abe-5c61-40a5-bf77-d8f929bdda78","Type":"ContainerStarted","Data":"6059e6ff089d7f5e5ee35f49c544101eb92dc9b9c16e25922cd65befc579043e"} Jan 25 00:24:14 crc kubenswrapper[4985]: I0125 00:24:14.848533 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-78bcbbdcff-kbwgq_81a908e4-c470-4f1b-9a45-6a7d339c8749/prometheus-webhook-snmp/0.log" Jan 25 00:24:30 crc kubenswrapper[4985]: I0125 00:24:30.304920 4985 generic.go:334] "Generic (PLEG): container finished" podID="84287a5e-f4fe-41ab-8081-38f70043e8e5" containerID="0e2ae4e35751b4477b332e029e0b21a39b594a37b2bc0ea2c22ca60c323dabb3" exitCode=0 Jan 25 00:24:30 crc kubenswrapper[4985]: I0125 00:24:30.305014 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-zdhps" event={"ID":"84287a5e-f4fe-41ab-8081-38f70043e8e5","Type":"ContainerDied","Data":"0e2ae4e35751b4477b332e029e0b21a39b594a37b2bc0ea2c22ca60c323dabb3"} Jan 25 00:24:30 crc kubenswrapper[4985]: I0125 00:24:30.305958 4985 scope.go:117] "RemoveContainer" containerID="0e2ae4e35751b4477b332e029e0b21a39b594a37b2bc0ea2c22ca60c323dabb3" Jan 25 00:24:36 crc kubenswrapper[4985]: I0125 00:24:36.769903 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-85xls"] Jan 25 00:24:36 crc kubenswrapper[4985]: E0125 00:24:36.770830 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b296b1e3-bf25-42c1-92f1-b9d45a31b705" containerName="curl" Jan 25 00:24:36 crc kubenswrapper[4985]: I0125 00:24:36.770850 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="b296b1e3-bf25-42c1-92f1-b9d45a31b705" containerName="curl" Jan 25 00:24:36 crc kubenswrapper[4985]: I0125 00:24:36.771075 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="b296b1e3-bf25-42c1-92f1-b9d45a31b705" containerName="curl" Jan 25 00:24:36 crc kubenswrapper[4985]: I0125 00:24:36.772595 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-85xls" Jan 25 00:24:36 crc kubenswrapper[4985]: I0125 00:24:36.776459 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-85xls"] Jan 25 00:24:36 crc kubenswrapper[4985]: I0125 00:24:36.818362 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8j6z\" (UniqueName: \"kubernetes.io/projected/9c98eac9-9532-4f53-a29b-d2d7157d29df-kube-api-access-z8j6z\") pod \"certified-operators-85xls\" (UID: \"9c98eac9-9532-4f53-a29b-d2d7157d29df\") " pod="openshift-marketplace/certified-operators-85xls" Jan 25 00:24:36 crc kubenswrapper[4985]: I0125 00:24:36.818426 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c98eac9-9532-4f53-a29b-d2d7157d29df-utilities\") pod \"certified-operators-85xls\" (UID: \"9c98eac9-9532-4f53-a29b-d2d7157d29df\") " pod="openshift-marketplace/certified-operators-85xls" Jan 25 00:24:36 crc kubenswrapper[4985]: I0125 00:24:36.818458 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c98eac9-9532-4f53-a29b-d2d7157d29df-catalog-content\") pod \"certified-operators-85xls\" (UID: \"9c98eac9-9532-4f53-a29b-d2d7157d29df\") " pod="openshift-marketplace/certified-operators-85xls" Jan 25 00:24:36 crc kubenswrapper[4985]: I0125 00:24:36.919774 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8j6z\" (UniqueName: \"kubernetes.io/projected/9c98eac9-9532-4f53-a29b-d2d7157d29df-kube-api-access-z8j6z\") pod \"certified-operators-85xls\" (UID: \"9c98eac9-9532-4f53-a29b-d2d7157d29df\") " pod="openshift-marketplace/certified-operators-85xls" Jan 25 00:24:36 crc kubenswrapper[4985]: I0125 00:24:36.919846 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c98eac9-9532-4f53-a29b-d2d7157d29df-utilities\") pod \"certified-operators-85xls\" (UID: \"9c98eac9-9532-4f53-a29b-d2d7157d29df\") " pod="openshift-marketplace/certified-operators-85xls" Jan 25 00:24:36 crc kubenswrapper[4985]: I0125 00:24:36.919879 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c98eac9-9532-4f53-a29b-d2d7157d29df-catalog-content\") pod \"certified-operators-85xls\" (UID: \"9c98eac9-9532-4f53-a29b-d2d7157d29df\") " pod="openshift-marketplace/certified-operators-85xls" Jan 25 00:24:36 crc kubenswrapper[4985]: I0125 00:24:36.920385 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c98eac9-9532-4f53-a29b-d2d7157d29df-catalog-content\") pod \"certified-operators-85xls\" (UID: \"9c98eac9-9532-4f53-a29b-d2d7157d29df\") " pod="openshift-marketplace/certified-operators-85xls" Jan 25 00:24:36 crc kubenswrapper[4985]: I0125 00:24:36.920399 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c98eac9-9532-4f53-a29b-d2d7157d29df-utilities\") pod \"certified-operators-85xls\" (UID: \"9c98eac9-9532-4f53-a29b-d2d7157d29df\") " pod="openshift-marketplace/certified-operators-85xls" Jan 25 00:24:36 crc kubenswrapper[4985]: I0125 00:24:36.941573 4985 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-z8j6z\" (UniqueName: \"kubernetes.io/projected/9c98eac9-9532-4f53-a29b-d2d7157d29df-kube-api-access-z8j6z\") pod \"certified-operators-85xls\" (UID: \"9c98eac9-9532-4f53-a29b-d2d7157d29df\") " pod="openshift-marketplace/certified-operators-85xls" Jan 25 00:24:37 crc kubenswrapper[4985]: I0125 00:24:37.096332 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-85xls" Jan 25 00:24:37 crc kubenswrapper[4985]: I0125 00:24:37.589478 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-85xls"] Jan 25 00:24:38 crc kubenswrapper[4985]: I0125 00:24:38.400908 4985 generic.go:334] "Generic (PLEG): container finished" podID="84287a5e-f4fe-41ab-8081-38f70043e8e5" containerID="f3a23dbf9a4eaf10e3c9ef487e8bc7213d4468efe8e369bda8ca1658b0196334" exitCode=0 Jan 25 00:24:38 crc kubenswrapper[4985]: I0125 00:24:38.400982 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-zdhps" event={"ID":"84287a5e-f4fe-41ab-8081-38f70043e8e5","Type":"ContainerDied","Data":"f3a23dbf9a4eaf10e3c9ef487e8bc7213d4468efe8e369bda8ca1658b0196334"} Jan 25 00:24:38 crc kubenswrapper[4985]: I0125 00:24:38.402891 4985 generic.go:334] "Generic (PLEG): container finished" podID="9c98eac9-9532-4f53-a29b-d2d7157d29df" containerID="2fc25cd4cbae62fc8e7827b917c7a6a1f506c0ea89e2a083d60b1c0e372c2531" exitCode=0 Jan 25 00:24:38 crc kubenswrapper[4985]: I0125 00:24:38.402924 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-85xls" event={"ID":"9c98eac9-9532-4f53-a29b-d2d7157d29df","Type":"ContainerDied","Data":"2fc25cd4cbae62fc8e7827b917c7a6a1f506c0ea89e2a083d60b1c0e372c2531"} Jan 25 00:24:38 crc kubenswrapper[4985]: I0125 00:24:38.402944 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-85xls" event={"ID":"9c98eac9-9532-4f53-a29b-d2d7157d29df","Type":"ContainerStarted","Data":"9b0805b8daa35f95e903a07096c3a43f89b7b380f099592a0934f6a78b7cc746"} Jan 25 00:24:39 crc kubenswrapper[4985]: I0125 00:24:39.706939 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-zdhps" Jan 25 00:24:39 crc kubenswrapper[4985]: I0125 00:24:39.765278 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4tdbh\" (UniqueName: \"kubernetes.io/projected/84287a5e-f4fe-41ab-8081-38f70043e8e5-kube-api-access-4tdbh\") pod \"84287a5e-f4fe-41ab-8081-38f70043e8e5\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " Jan 25 00:24:39 crc kubenswrapper[4985]: I0125 00:24:39.765361 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-collectd-entrypoint-script\") pod \"84287a5e-f4fe-41ab-8081-38f70043e8e5\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " Jan 25 00:24:39 crc kubenswrapper[4985]: I0125 00:24:39.765414 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-ceilometer-publisher\") pod \"84287a5e-f4fe-41ab-8081-38f70043e8e5\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " Jan 25 00:24:39 crc kubenswrapper[4985]: I0125 00:24:39.765448 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-collectd-config\") pod \"84287a5e-f4fe-41ab-8081-38f70043e8e5\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " Jan 25 00:24:39 crc kubenswrapper[4985]: I0125 00:24:39.765512 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-ceilometer-entrypoint-script\") pod \"84287a5e-f4fe-41ab-8081-38f70043e8e5\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " Jan 25 00:24:39 crc kubenswrapper[4985]: I0125 00:24:39.765536 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-sensubility-config\") pod \"84287a5e-f4fe-41ab-8081-38f70043e8e5\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " Jan 25 00:24:39 crc kubenswrapper[4985]: I0125 00:24:39.765563 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-healthcheck-log\") pod \"84287a5e-f4fe-41ab-8081-38f70043e8e5\" (UID: \"84287a5e-f4fe-41ab-8081-38f70043e8e5\") " Jan 25 00:24:39 crc kubenswrapper[4985]: I0125 00:24:39.801457 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84287a5e-f4fe-41ab-8081-38f70043e8e5-kube-api-access-4tdbh" (OuterVolumeSpecName: "kube-api-access-4tdbh") pod "84287a5e-f4fe-41ab-8081-38f70043e8e5" (UID: "84287a5e-f4fe-41ab-8081-38f70043e8e5"). InnerVolumeSpecName "kube-api-access-4tdbh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:24:39 crc kubenswrapper[4985]: I0125 00:24:39.801720 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-ceilometer-publisher" (OuterVolumeSpecName: "ceilometer-publisher") pod "84287a5e-f4fe-41ab-8081-38f70043e8e5" (UID: "84287a5e-f4fe-41ab-8081-38f70043e8e5"). InnerVolumeSpecName "ceilometer-publisher". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:24:39 crc kubenswrapper[4985]: I0125 00:24:39.805969 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-healthcheck-log" (OuterVolumeSpecName: "healthcheck-log") pod "84287a5e-f4fe-41ab-8081-38f70043e8e5" (UID: "84287a5e-f4fe-41ab-8081-38f70043e8e5"). InnerVolumeSpecName "healthcheck-log". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:24:39 crc kubenswrapper[4985]: I0125 00:24:39.810031 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-collectd-config" (OuterVolumeSpecName: "collectd-config") pod "84287a5e-f4fe-41ab-8081-38f70043e8e5" (UID: "84287a5e-f4fe-41ab-8081-38f70043e8e5"). InnerVolumeSpecName "collectd-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:24:39 crc kubenswrapper[4985]: I0125 00:24:39.813483 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-collectd-entrypoint-script" (OuterVolumeSpecName: "collectd-entrypoint-script") pod "84287a5e-f4fe-41ab-8081-38f70043e8e5" (UID: "84287a5e-f4fe-41ab-8081-38f70043e8e5"). InnerVolumeSpecName "collectd-entrypoint-script". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:24:39 crc kubenswrapper[4985]: I0125 00:24:39.816696 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-ceilometer-entrypoint-script" (OuterVolumeSpecName: "ceilometer-entrypoint-script") pod "84287a5e-f4fe-41ab-8081-38f70043e8e5" (UID: "84287a5e-f4fe-41ab-8081-38f70043e8e5"). InnerVolumeSpecName "ceilometer-entrypoint-script". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:24:39 crc kubenswrapper[4985]: I0125 00:24:39.829613 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-sensubility-config" (OuterVolumeSpecName: "sensubility-config") pod "84287a5e-f4fe-41ab-8081-38f70043e8e5" (UID: "84287a5e-f4fe-41ab-8081-38f70043e8e5"). InnerVolumeSpecName "sensubility-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:24:39 crc kubenswrapper[4985]: I0125 00:24:39.867568 4985 reconciler_common.go:293] "Volume detached for volume \"ceilometer-publisher\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-ceilometer-publisher\") on node \"crc\" DevicePath \"\"" Jan 25 00:24:39 crc kubenswrapper[4985]: I0125 00:24:39.867605 4985 reconciler_common.go:293] "Volume detached for volume \"collectd-config\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-collectd-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:24:39 crc kubenswrapper[4985]: I0125 00:24:39.867615 4985 reconciler_common.go:293] "Volume detached for volume \"ceilometer-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-ceilometer-entrypoint-script\") on node \"crc\" DevicePath \"\"" Jan 25 00:24:39 crc kubenswrapper[4985]: I0125 00:24:39.867635 4985 reconciler_common.go:293] "Volume detached for volume \"sensubility-config\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-sensubility-config\") on node \"crc\" DevicePath \"\"" Jan 25 00:24:39 crc kubenswrapper[4985]: I0125 00:24:39.867651 4985 reconciler_common.go:293] "Volume detached for volume \"healthcheck-log\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-healthcheck-log\") on node \"crc\" DevicePath \"\"" Jan 25 00:24:39 crc kubenswrapper[4985]: I0125 00:24:39.867665 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4tdbh\" (UniqueName: \"kubernetes.io/projected/84287a5e-f4fe-41ab-8081-38f70043e8e5-kube-api-access-4tdbh\") on node \"crc\" DevicePath \"\"" Jan 25 00:24:39 crc kubenswrapper[4985]: I0125 00:24:39.867675 4985 reconciler_common.go:293] "Volume detached for volume \"collectd-entrypoint-script\" (UniqueName: \"kubernetes.io/configmap/84287a5e-f4fe-41ab-8081-38f70043e8e5-collectd-entrypoint-script\") on node \"crc\" DevicePath \"\"" Jan 25 00:24:40 crc kubenswrapper[4985]: I0125 00:24:40.418627 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="service-telemetry/stf-smoketest-smoke1-zdhps" event={"ID":"84287a5e-f4fe-41ab-8081-38f70043e8e5","Type":"ContainerDied","Data":"9f2079e0c41313404090d339c1859146503e0124e0307f3bd6d84a3b99996a8d"} Jan 25 00:24:40 crc kubenswrapper[4985]: I0125 00:24:40.418912 4985 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9f2079e0c41313404090d339c1859146503e0124e0307f3bd6d84a3b99996a8d" Jan 25 00:24:40 crc kubenswrapper[4985]: I0125 00:24:40.418686 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="service-telemetry/stf-smoketest-smoke1-zdhps" Jan 25 00:24:40 crc kubenswrapper[4985]: I0125 00:24:40.421378 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-85xls" event={"ID":"9c98eac9-9532-4f53-a29b-d2d7157d29df","Type":"ContainerStarted","Data":"a7ff8381edcae59efb867e3e3d5a86a006a5d8a2852c380ef5ffe8a4ce8c82b8"} Jan 25 00:24:41 crc kubenswrapper[4985]: I0125 00:24:41.428839 4985 generic.go:334] "Generic (PLEG): container finished" podID="9c98eac9-9532-4f53-a29b-d2d7157d29df" containerID="a7ff8381edcae59efb867e3e3d5a86a006a5d8a2852c380ef5ffe8a4ce8c82b8" exitCode=0 Jan 25 00:24:41 crc kubenswrapper[4985]: I0125 00:24:41.428888 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-85xls" event={"ID":"9c98eac9-9532-4f53-a29b-d2d7157d29df","Type":"ContainerDied","Data":"a7ff8381edcae59efb867e3e3d5a86a006a5d8a2852c380ef5ffe8a4ce8c82b8"} Jan 25 00:24:42 crc kubenswrapper[4985]: I0125 00:24:42.032625 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_stf-smoketest-smoke1-zdhps_84287a5e-f4fe-41ab-8081-38f70043e8e5/smoketest-collectd/0.log" Jan 25 00:24:42 crc kubenswrapper[4985]: I0125 00:24:42.354475 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_stf-smoketest-smoke1-zdhps_84287a5e-f4fe-41ab-8081-38f70043e8e5/smoketest-ceilometer/0.log" Jan 25 00:24:42 crc kubenswrapper[4985]: I0125 00:24:42.726266 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-interconnect-68864d46cb-qlh7z_9cdaefde-d1c2-4e5f-b6aa-e797b9e8dc77/default-interconnect/0.log" Jan 25 00:24:43 crc kubenswrapper[4985]: I0125 00:24:43.079065 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46_bfe2073c-f3bf-42e3-8d47-394d4fc025d3/bridge/1.log" Jan 25 00:24:43 crc kubenswrapper[4985]: I0125 00:24:43.406228 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-meter-smartgateway-7996dc9458-9dw46_bfe2073c-f3bf-42e3-8d47-394d4fc025d3/sg-core/0.log" Jan 25 00:24:43 crc kubenswrapper[4985]: I0125 00:24:43.443138 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-85xls" event={"ID":"9c98eac9-9532-4f53-a29b-d2d7157d29df","Type":"ContainerStarted","Data":"bed713414167e6210f83b5dab44a071698c03a92f622a4e596dd5a797fbdd28b"} Jan 25 00:24:43 crc kubenswrapper[4985]: I0125 00:24:43.479401 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-85xls" podStartSLOduration=3.265665554 podStartE2EDuration="7.479378887s" podCreationTimestamp="2026-01-25 00:24:36 +0000 UTC" firstStartedPulling="2026-01-25 00:24:38.405252734 +0000 UTC m=+1088.437188997" lastFinishedPulling="2026-01-25 00:24:42.618966067 +0000 UTC m=+1092.650902330" observedRunningTime="2026-01-25 00:24:43.474544211 +0000 UTC m=+1093.506480494" watchObservedRunningTime="2026-01-25 00:24:43.479378887 +0000 UTC m=+1093.511315170" Jan 25 00:24:43 crc kubenswrapper[4985]: I0125 00:24:43.765786 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t_0f416a9f-a874-47ae-b48b-b490193667c4/bridge/1.log" Jan 25 00:24:44 crc kubenswrapper[4985]: I0125 00:24:44.091481 4985 log.go:25] "Finished parsing log file" 
path="/var/log/pods/service-telemetry_default-cloud1-coll-event-smartgateway-687bb6d468-jpb8t_0f416a9f-a874-47ae-b48b-b490193667c4/sg-core/0.log" Jan 25 00:24:44 crc kubenswrapper[4985]: I0125 00:24:44.408421 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq_97e8c495-9a25-42da-a0ab-5a77667b4623/bridge/1.log" Jan 25 00:24:44 crc kubenswrapper[4985]: I0125 00:24:44.779573 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-meter-smartgateway-b57f974ff-z4rlq_97e8c495-9a25-42da-a0ab-5a77667b4623/sg-core/0.log" Jan 25 00:24:45 crc kubenswrapper[4985]: I0125 00:24:45.119821 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x_512b3aca-4b83-4d34-8401-a14ea99f20f2/bridge/1.log" Jan 25 00:24:45 crc kubenswrapper[4985]: I0125 00:24:45.405708 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-ceil-event-smartgateway-c8857b758-xxr4x_512b3aca-4b83-4d34-8401-a14ea99f20f2/sg-core/0.log" Jan 25 00:24:45 crc kubenswrapper[4985]: I0125 00:24:45.747913 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg_75635f0e-e1c4-4a69-8893-5cabc8341db3/bridge/1.log" Jan 25 00:24:46 crc kubenswrapper[4985]: I0125 00:24:46.087959 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-cloud1-sens-meter-smartgateway-6864f4fb65-jpscg_75635f0e-e1c4-4a69-8893-5cabc8341db3/sg-core/0.log" Jan 25 00:24:47 crc kubenswrapper[4985]: I0125 00:24:47.096407 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-85xls" Jan 25 00:24:47 crc kubenswrapper[4985]: I0125 00:24:47.097353 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-85xls" Jan 25 00:24:47 crc kubenswrapper[4985]: I0125 00:24:47.148313 4985 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-85xls" Jan 25 00:24:47 crc kubenswrapper[4985]: I0125 00:24:47.522166 4985 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-85xls" Jan 25 00:24:47 crc kubenswrapper[4985]: I0125 00:24:47.576381 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-85xls"] Jan 25 00:24:47 crc kubenswrapper[4985]: I0125 00:24:47.721411 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-bbbc889bc-wnp6h_1f53aed3-9734-4492-bd91-ba42c17ae773/operator/0.log" Jan 25 00:24:48 crc kubenswrapper[4985]: I0125 00:24:48.154383 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_prometheus-default-0_5c8a5a59-9f83-491e-8fd0-93b4cc4941cc/prometheus/0.log" Jan 25 00:24:48 crc kubenswrapper[4985]: I0125 00:24:48.518716 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_elasticsearch-es-default-0_ceb530c6-05c0-4e6f-a0cb-100077e6777e/elasticsearch/0.log" Jan 25 00:24:48 crc kubenswrapper[4985]: I0125 00:24:48.879946 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_default-snmp-webhook-78bcbbdcff-kbwgq_81a908e4-c470-4f1b-9a45-6a7d339c8749/prometheus-webhook-snmp/0.log" Jan 25 00:24:49 crc 
kubenswrapper[4985]: I0125 00:24:49.237753 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_alertmanager-default-0_049acf2f-5927-42db-8380-a735fd0804b3/alertmanager/0.log" Jan 25 00:24:49 crc kubenswrapper[4985]: I0125 00:24:49.487277 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-85xls" podUID="9c98eac9-9532-4f53-a29b-d2d7157d29df" containerName="registry-server" containerID="cri-o://bed713414167e6210f83b5dab44a071698c03a92f622a4e596dd5a797fbdd28b" gracePeriod=2 Jan 25 00:24:49 crc kubenswrapper[4985]: I0125 00:24:49.893477 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-85xls" Jan 25 00:24:49 crc kubenswrapper[4985]: I0125 00:24:49.934266 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z8j6z\" (UniqueName: \"kubernetes.io/projected/9c98eac9-9532-4f53-a29b-d2d7157d29df-kube-api-access-z8j6z\") pod \"9c98eac9-9532-4f53-a29b-d2d7157d29df\" (UID: \"9c98eac9-9532-4f53-a29b-d2d7157d29df\") " Jan 25 00:24:49 crc kubenswrapper[4985]: I0125 00:24:49.934319 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c98eac9-9532-4f53-a29b-d2d7157d29df-utilities\") pod \"9c98eac9-9532-4f53-a29b-d2d7157d29df\" (UID: \"9c98eac9-9532-4f53-a29b-d2d7157d29df\") " Jan 25 00:24:49 crc kubenswrapper[4985]: I0125 00:24:49.934402 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c98eac9-9532-4f53-a29b-d2d7157d29df-catalog-content\") pod \"9c98eac9-9532-4f53-a29b-d2d7157d29df\" (UID: \"9c98eac9-9532-4f53-a29b-d2d7157d29df\") " Jan 25 00:24:49 crc kubenswrapper[4985]: I0125 00:24:49.935116 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c98eac9-9532-4f53-a29b-d2d7157d29df-utilities" (OuterVolumeSpecName: "utilities") pod "9c98eac9-9532-4f53-a29b-d2d7157d29df" (UID: "9c98eac9-9532-4f53-a29b-d2d7157d29df"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:24:49 crc kubenswrapper[4985]: I0125 00:24:49.942706 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c98eac9-9532-4f53-a29b-d2d7157d29df-kube-api-access-z8j6z" (OuterVolumeSpecName: "kube-api-access-z8j6z") pod "9c98eac9-9532-4f53-a29b-d2d7157d29df" (UID: "9c98eac9-9532-4f53-a29b-d2d7157d29df"). InnerVolumeSpecName "kube-api-access-z8j6z". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:24:50 crc kubenswrapper[4985]: I0125 00:24:50.008123 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c98eac9-9532-4f53-a29b-d2d7157d29df-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9c98eac9-9532-4f53-a29b-d2d7157d29df" (UID: "9c98eac9-9532-4f53-a29b-d2d7157d29df"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:24:50 crc kubenswrapper[4985]: I0125 00:24:50.035684 4985 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c98eac9-9532-4f53-a29b-d2d7157d29df-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 25 00:24:50 crc kubenswrapper[4985]: I0125 00:24:50.035720 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z8j6z\" (UniqueName: \"kubernetes.io/projected/9c98eac9-9532-4f53-a29b-d2d7157d29df-kube-api-access-z8j6z\") on node \"crc\" DevicePath \"\"" Jan 25 00:24:50 crc kubenswrapper[4985]: I0125 00:24:50.035732 4985 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c98eac9-9532-4f53-a29b-d2d7157d29df-utilities\") on node \"crc\" DevicePath \"\"" Jan 25 00:24:50 crc kubenswrapper[4985]: I0125 00:24:50.497040 4985 generic.go:334] "Generic (PLEG): container finished" podID="9c98eac9-9532-4f53-a29b-d2d7157d29df" containerID="bed713414167e6210f83b5dab44a071698c03a92f622a4e596dd5a797fbdd28b" exitCode=0 Jan 25 00:24:50 crc kubenswrapper[4985]: I0125 00:24:50.497078 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-85xls" Jan 25 00:24:50 crc kubenswrapper[4985]: I0125 00:24:50.497117 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-85xls" event={"ID":"9c98eac9-9532-4f53-a29b-d2d7157d29df","Type":"ContainerDied","Data":"bed713414167e6210f83b5dab44a071698c03a92f622a4e596dd5a797fbdd28b"} Jan 25 00:24:50 crc kubenswrapper[4985]: I0125 00:24:50.497435 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-85xls" event={"ID":"9c98eac9-9532-4f53-a29b-d2d7157d29df","Type":"ContainerDied","Data":"9b0805b8daa35f95e903a07096c3a43f89b7b380f099592a0934f6a78b7cc746"} Jan 25 00:24:50 crc kubenswrapper[4985]: I0125 00:24:50.497459 4985 scope.go:117] "RemoveContainer" containerID="bed713414167e6210f83b5dab44a071698c03a92f622a4e596dd5a797fbdd28b" Jan 25 00:24:50 crc kubenswrapper[4985]: I0125 00:24:50.533305 4985 scope.go:117] "RemoveContainer" containerID="a7ff8381edcae59efb867e3e3d5a86a006a5d8a2852c380ef5ffe8a4ce8c82b8" Jan 25 00:24:50 crc kubenswrapper[4985]: I0125 00:24:50.536071 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-85xls"] Jan 25 00:24:50 crc kubenswrapper[4985]: I0125 00:24:50.541818 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-85xls"] Jan 25 00:24:50 crc kubenswrapper[4985]: I0125 00:24:50.566915 4985 scope.go:117] "RemoveContainer" containerID="2fc25cd4cbae62fc8e7827b917c7a6a1f506c0ea89e2a083d60b1c0e372c2531" Jan 25 00:24:50 crc kubenswrapper[4985]: I0125 00:24:50.588299 4985 scope.go:117] "RemoveContainer" containerID="bed713414167e6210f83b5dab44a071698c03a92f622a4e596dd5a797fbdd28b" Jan 25 00:24:50 crc kubenswrapper[4985]: E0125 00:24:50.588770 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bed713414167e6210f83b5dab44a071698c03a92f622a4e596dd5a797fbdd28b\": container with ID starting with bed713414167e6210f83b5dab44a071698c03a92f622a4e596dd5a797fbdd28b not found: ID does not exist" containerID="bed713414167e6210f83b5dab44a071698c03a92f622a4e596dd5a797fbdd28b" Jan 25 00:24:50 crc kubenswrapper[4985]: I0125 00:24:50.588823 
4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bed713414167e6210f83b5dab44a071698c03a92f622a4e596dd5a797fbdd28b"} err="failed to get container status \"bed713414167e6210f83b5dab44a071698c03a92f622a4e596dd5a797fbdd28b\": rpc error: code = NotFound desc = could not find container \"bed713414167e6210f83b5dab44a071698c03a92f622a4e596dd5a797fbdd28b\": container with ID starting with bed713414167e6210f83b5dab44a071698c03a92f622a4e596dd5a797fbdd28b not found: ID does not exist" Jan 25 00:24:50 crc kubenswrapper[4985]: I0125 00:24:50.588856 4985 scope.go:117] "RemoveContainer" containerID="a7ff8381edcae59efb867e3e3d5a86a006a5d8a2852c380ef5ffe8a4ce8c82b8" Jan 25 00:24:50 crc kubenswrapper[4985]: E0125 00:24:50.589328 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a7ff8381edcae59efb867e3e3d5a86a006a5d8a2852c380ef5ffe8a4ce8c82b8\": container with ID starting with a7ff8381edcae59efb867e3e3d5a86a006a5d8a2852c380ef5ffe8a4ce8c82b8 not found: ID does not exist" containerID="a7ff8381edcae59efb867e3e3d5a86a006a5d8a2852c380ef5ffe8a4ce8c82b8" Jan 25 00:24:50 crc kubenswrapper[4985]: I0125 00:24:50.589373 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a7ff8381edcae59efb867e3e3d5a86a006a5d8a2852c380ef5ffe8a4ce8c82b8"} err="failed to get container status \"a7ff8381edcae59efb867e3e3d5a86a006a5d8a2852c380ef5ffe8a4ce8c82b8\": rpc error: code = NotFound desc = could not find container \"a7ff8381edcae59efb867e3e3d5a86a006a5d8a2852c380ef5ffe8a4ce8c82b8\": container with ID starting with a7ff8381edcae59efb867e3e3d5a86a006a5d8a2852c380ef5ffe8a4ce8c82b8 not found: ID does not exist" Jan 25 00:24:50 crc kubenswrapper[4985]: I0125 00:24:50.589400 4985 scope.go:117] "RemoveContainer" containerID="2fc25cd4cbae62fc8e7827b917c7a6a1f506c0ea89e2a083d60b1c0e372c2531" Jan 25 00:24:50 crc kubenswrapper[4985]: E0125 00:24:50.589664 4985 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2fc25cd4cbae62fc8e7827b917c7a6a1f506c0ea89e2a083d60b1c0e372c2531\": container with ID starting with 2fc25cd4cbae62fc8e7827b917c7a6a1f506c0ea89e2a083d60b1c0e372c2531 not found: ID does not exist" containerID="2fc25cd4cbae62fc8e7827b917c7a6a1f506c0ea89e2a083d60b1c0e372c2531" Jan 25 00:24:50 crc kubenswrapper[4985]: I0125 00:24:50.589699 4985 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fc25cd4cbae62fc8e7827b917c7a6a1f506c0ea89e2a083d60b1c0e372c2531"} err="failed to get container status \"2fc25cd4cbae62fc8e7827b917c7a6a1f506c0ea89e2a083d60b1c0e372c2531\": rpc error: code = NotFound desc = could not find container \"2fc25cd4cbae62fc8e7827b917c7a6a1f506c0ea89e2a083d60b1c0e372c2531\": container with ID starting with 2fc25cd4cbae62fc8e7827b917c7a6a1f506c0ea89e2a083d60b1c0e372c2531 not found: ID does not exist" Jan 25 00:24:52 crc kubenswrapper[4985]: I0125 00:24:52.282363 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c98eac9-9532-4f53-a29b-d2d7157d29df" path="/var/lib/kubelet/pods/9c98eac9-9532-4f53-a29b-d2d7157d29df/volumes" Jan 25 00:25:03 crc kubenswrapper[4985]: I0125 00:25:03.554993 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_service-telemetry-operator-55b89ddfb9-sp9wb_8db9b718-8110-4d6a-9082-26ebabcf60c6/operator/0.log" Jan 25 00:25:05 crc kubenswrapper[4985]: I0125 
00:25:05.607373 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_smart-gateway-operator-bbbc889bc-wnp6h_1f53aed3-9734-4492-bd91-ba42c17ae773/operator/0.log" Jan 25 00:25:05 crc kubenswrapper[4985]: I0125 00:25:05.955670 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/service-telemetry_qdr-test_dab93b1f-04e3-46bc-ae57-9e38a3ab3c71/qdr/0.log" Jan 25 00:25:33 crc kubenswrapper[4985]: I0125 00:25:33.046428 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-blsh4/must-gather-2v9h6"] Jan 25 00:25:33 crc kubenswrapper[4985]: E0125 00:25:33.047362 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c98eac9-9532-4f53-a29b-d2d7157d29df" containerName="registry-server" Jan 25 00:25:33 crc kubenswrapper[4985]: I0125 00:25:33.047378 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c98eac9-9532-4f53-a29b-d2d7157d29df" containerName="registry-server" Jan 25 00:25:33 crc kubenswrapper[4985]: E0125 00:25:33.047391 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84287a5e-f4fe-41ab-8081-38f70043e8e5" containerName="smoketest-collectd" Jan 25 00:25:33 crc kubenswrapper[4985]: I0125 00:25:33.047400 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="84287a5e-f4fe-41ab-8081-38f70043e8e5" containerName="smoketest-collectd" Jan 25 00:25:33 crc kubenswrapper[4985]: E0125 00:25:33.047414 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c98eac9-9532-4f53-a29b-d2d7157d29df" containerName="extract-content" Jan 25 00:25:33 crc kubenswrapper[4985]: I0125 00:25:33.047422 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c98eac9-9532-4f53-a29b-d2d7157d29df" containerName="extract-content" Jan 25 00:25:33 crc kubenswrapper[4985]: E0125 00:25:33.047432 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c98eac9-9532-4f53-a29b-d2d7157d29df" containerName="extract-utilities" Jan 25 00:25:33 crc kubenswrapper[4985]: I0125 00:25:33.047440 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c98eac9-9532-4f53-a29b-d2d7157d29df" containerName="extract-utilities" Jan 25 00:25:33 crc kubenswrapper[4985]: E0125 00:25:33.047461 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84287a5e-f4fe-41ab-8081-38f70043e8e5" containerName="smoketest-ceilometer" Jan 25 00:25:33 crc kubenswrapper[4985]: I0125 00:25:33.047469 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="84287a5e-f4fe-41ab-8081-38f70043e8e5" containerName="smoketest-ceilometer" Jan 25 00:25:33 crc kubenswrapper[4985]: I0125 00:25:33.047631 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="84287a5e-f4fe-41ab-8081-38f70043e8e5" containerName="smoketest-collectd" Jan 25 00:25:33 crc kubenswrapper[4985]: I0125 00:25:33.047651 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c98eac9-9532-4f53-a29b-d2d7157d29df" containerName="registry-server" Jan 25 00:25:33 crc kubenswrapper[4985]: I0125 00:25:33.047670 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="84287a5e-f4fe-41ab-8081-38f70043e8e5" containerName="smoketest-ceilometer" Jan 25 00:25:33 crc kubenswrapper[4985]: I0125 00:25:33.048515 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-blsh4/must-gather-2v9h6" Jan 25 00:25:33 crc kubenswrapper[4985]: I0125 00:25:33.051655 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-blsh4"/"openshift-service-ca.crt" Jan 25 00:25:33 crc kubenswrapper[4985]: I0125 00:25:33.053051 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-blsh4"/"kube-root-ca.crt" Jan 25 00:25:33 crc kubenswrapper[4985]: I0125 00:25:33.091819 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-blsh4/must-gather-2v9h6"] Jan 25 00:25:33 crc kubenswrapper[4985]: I0125 00:25:33.150949 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtjp9\" (UniqueName: \"kubernetes.io/projected/caef0ee4-a055-4f06-ba24-6fff3ba1ac7b-kube-api-access-qtjp9\") pod \"must-gather-2v9h6\" (UID: \"caef0ee4-a055-4f06-ba24-6fff3ba1ac7b\") " pod="openshift-must-gather-blsh4/must-gather-2v9h6" Jan 25 00:25:33 crc kubenswrapper[4985]: I0125 00:25:33.151026 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/caef0ee4-a055-4f06-ba24-6fff3ba1ac7b-must-gather-output\") pod \"must-gather-2v9h6\" (UID: \"caef0ee4-a055-4f06-ba24-6fff3ba1ac7b\") " pod="openshift-must-gather-blsh4/must-gather-2v9h6" Jan 25 00:25:33 crc kubenswrapper[4985]: I0125 00:25:33.252784 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtjp9\" (UniqueName: \"kubernetes.io/projected/caef0ee4-a055-4f06-ba24-6fff3ba1ac7b-kube-api-access-qtjp9\") pod \"must-gather-2v9h6\" (UID: \"caef0ee4-a055-4f06-ba24-6fff3ba1ac7b\") " pod="openshift-must-gather-blsh4/must-gather-2v9h6" Jan 25 00:25:33 crc kubenswrapper[4985]: I0125 00:25:33.252843 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/caef0ee4-a055-4f06-ba24-6fff3ba1ac7b-must-gather-output\") pod \"must-gather-2v9h6\" (UID: \"caef0ee4-a055-4f06-ba24-6fff3ba1ac7b\") " pod="openshift-must-gather-blsh4/must-gather-2v9h6" Jan 25 00:25:33 crc kubenswrapper[4985]: I0125 00:25:33.253235 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/caef0ee4-a055-4f06-ba24-6fff3ba1ac7b-must-gather-output\") pod \"must-gather-2v9h6\" (UID: \"caef0ee4-a055-4f06-ba24-6fff3ba1ac7b\") " pod="openshift-must-gather-blsh4/must-gather-2v9h6" Jan 25 00:25:33 crc kubenswrapper[4985]: I0125 00:25:33.269485 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtjp9\" (UniqueName: \"kubernetes.io/projected/caef0ee4-a055-4f06-ba24-6fff3ba1ac7b-kube-api-access-qtjp9\") pod \"must-gather-2v9h6\" (UID: \"caef0ee4-a055-4f06-ba24-6fff3ba1ac7b\") " pod="openshift-must-gather-blsh4/must-gather-2v9h6" Jan 25 00:25:33 crc kubenswrapper[4985]: I0125 00:25:33.397399 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-blsh4/must-gather-2v9h6" Jan 25 00:25:33 crc kubenswrapper[4985]: I0125 00:25:33.594911 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-blsh4/must-gather-2v9h6"] Jan 25 00:25:33 crc kubenswrapper[4985]: I0125 00:25:33.880875 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-blsh4/must-gather-2v9h6" event={"ID":"caef0ee4-a055-4f06-ba24-6fff3ba1ac7b","Type":"ContainerStarted","Data":"5cee2f7779a72f393df99e859cea63d1860ec02dd5e48cd507caecc18f24bcbf"} Jan 25 00:25:42 crc kubenswrapper[4985]: I0125 00:25:42.951888 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-blsh4/must-gather-2v9h6" event={"ID":"caef0ee4-a055-4f06-ba24-6fff3ba1ac7b","Type":"ContainerStarted","Data":"f159cea8698d5bad23d243bb452357a5ad42ee8fb1c840a72347bf5d02df647e"} Jan 25 00:25:42 crc kubenswrapper[4985]: I0125 00:25:42.952583 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-blsh4/must-gather-2v9h6" event={"ID":"caef0ee4-a055-4f06-ba24-6fff3ba1ac7b","Type":"ContainerStarted","Data":"214cb5f81f8246245a8c50be2fa5a238eae2c982f165310e62cebbdfc7f85dc7"} Jan 25 00:25:42 crc kubenswrapper[4985]: I0125 00:25:42.967962 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-blsh4/must-gather-2v9h6" podStartSLOduration=1.664611193 podStartE2EDuration="9.967945631s" podCreationTimestamp="2026-01-25 00:25:33 +0000 UTC" firstStartedPulling="2026-01-25 00:25:33.600868443 +0000 UTC m=+1143.632804716" lastFinishedPulling="2026-01-25 00:25:41.904202881 +0000 UTC m=+1151.936139154" observedRunningTime="2026-01-25 00:25:42.965716271 +0000 UTC m=+1152.997652544" watchObservedRunningTime="2026-01-25 00:25:42.967945631 +0000 UTC m=+1152.999881904" Jan 25 00:26:27 crc kubenswrapper[4985]: I0125 00:26:27.177523 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-7tgz4_72c63de9-5d4f-4037-b70e-11ddf9a4904c/control-plane-machine-set-operator/0.log" Jan 25 00:26:27 crc kubenswrapper[4985]: I0125 00:26:27.306855 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-bnmp5_e74fd6cc-f34d-41c4-8d01-0f556277340d/kube-rbac-proxy/0.log" Jan 25 00:26:27 crc kubenswrapper[4985]: I0125 00:26:27.353634 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-bnmp5_e74fd6cc-f34d-41c4-8d01-0f556277340d/machine-api-operator/0.log" Jan 25 00:26:35 crc kubenswrapper[4985]: I0125 00:26:35.836750 4985 patch_prober.go:28] interesting pod/machine-config-daemon-dddxc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 25 00:26:35 crc kubenswrapper[4985]: I0125 00:26:35.837530 4985 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 25 00:26:40 crc kubenswrapper[4985]: I0125 00:26:40.180509 4985 log.go:25] "Finished parsing log file" 
path="/var/log/pods/cert-manager_cert-manager-86cb77c54b-mq7gd_e75df2f5-fd78-42d9-8bb1-65e73697c5bc/cert-manager-controller/0.log" Jan 25 00:26:40 crc kubenswrapper[4985]: I0125 00:26:40.287955 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-855d9ccff4-z9zrf_1e15f504-62d6-4982-9729-34f3bbd5f784/cert-manager-cainjector/0.log" Jan 25 00:26:40 crc kubenswrapper[4985]: I0125 00:26:40.431945 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-f4fb5df64-5kq2z_a5f5a37d-d901-4929-90c0-91d99f3cd65b/cert-manager-webhook/0.log" Jan 25 00:26:54 crc kubenswrapper[4985]: I0125 00:26:54.178276 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-85xxr_16badb6b-d0b6-454e-a544-6811966984a6/prometheus-operator/0.log" Jan 25 00:26:54 crc kubenswrapper[4985]: I0125 00:26:54.337659 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-954995c76-q6cxx_ba5b943c-3070-4afc-b57c-fb0be23f2a32/prometheus-operator-admission-webhook/0.log" Jan 25 00:26:54 crc kubenswrapper[4985]: I0125 00:26:54.392055 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-954995c76-xnqrt_b1769099-2ab1-4b9b-b373-dc9e096c14d9/prometheus-operator-admission-webhook/0.log" Jan 25 00:26:54 crc kubenswrapper[4985]: I0125 00:26:54.506947 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-t7dvp_ded8ba3b-7925-430a-a595-93b3b00ae9b5/operator/0.log" Jan 25 00:26:54 crc kubenswrapper[4985]: I0125 00:26:54.534076 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-xv6dg_5221e9c8-1162-482b-8120-6dd97c481304/perses-operator/0.log" Jan 25 00:27:05 crc kubenswrapper[4985]: I0125 00:27:05.835783 4985 patch_prober.go:28] interesting pod/machine-config-daemon-dddxc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 25 00:27:05 crc kubenswrapper[4985]: I0125 00:27:05.836569 4985 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 25 00:27:10 crc kubenswrapper[4985]: I0125 00:27:10.062161 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5_a9856c83-fe7c-4653-8d19-114a5b040a73/util/0.log" Jan 25 00:27:10 crc kubenswrapper[4985]: I0125 00:27:10.289176 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5_a9856c83-fe7c-4653-8d19-114a5b040a73/util/0.log" Jan 25 00:27:10 crc kubenswrapper[4985]: I0125 00:27:10.382819 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5_a9856c83-fe7c-4653-8d19-114a5b040a73/pull/0.log" Jan 25 00:27:10 crc kubenswrapper[4985]: I0125 00:27:10.384684 4985 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5_a9856c83-fe7c-4653-8d19-114a5b040a73/pull/0.log" Jan 25 00:27:10 crc kubenswrapper[4985]: I0125 00:27:10.489501 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5_a9856c83-fe7c-4653-8d19-114a5b040a73/util/0.log" Jan 25 00:27:10 crc kubenswrapper[4985]: I0125 00:27:10.500053 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5_a9856c83-fe7c-4653-8d19-114a5b040a73/pull/0.log" Jan 25 00:27:10 crc kubenswrapper[4985]: I0125 00:27:10.588660 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_1f59f640c8a0eb1a7b0f26c81382bbdde784d03eb439a940bb8da3931acdnh5_a9856c83-fe7c-4653-8d19-114a5b040a73/extract/0.log" Jan 25 00:27:10 crc kubenswrapper[4985]: I0125 00:27:10.655819 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5_2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc/util/0.log" Jan 25 00:27:10 crc kubenswrapper[4985]: I0125 00:27:10.792198 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5_2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc/util/0.log" Jan 25 00:27:10 crc kubenswrapper[4985]: I0125 00:27:10.815968 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5_2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc/pull/0.log" Jan 25 00:27:10 crc kubenswrapper[4985]: I0125 00:27:10.829315 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5_2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc/pull/0.log" Jan 25 00:27:10 crc kubenswrapper[4985]: I0125 00:27:10.993508 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5_2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc/util/0.log" Jan 25 00:27:11 crc kubenswrapper[4985]: I0125 00:27:11.010919 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5_2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc/extract/0.log" Jan 25 00:27:11 crc kubenswrapper[4985]: I0125 00:27:11.032482 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_6e3e74c24700cc2bb66271d960117ff0976dc779e6a3bc37905b952e8flx4z5_2acbb776-e15d-49e2-b2e0-4e1e1ad82ddc/pull/0.log" Jan 25 00:27:11 crc kubenswrapper[4985]: I0125 00:27:11.158839 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs_04fd0691-1cb5-4e06-80e8-cb251c8cf4d3/util/0.log" Jan 25 00:27:11 crc kubenswrapper[4985]: I0125 00:27:11.310406 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs_04fd0691-1cb5-4e06-80e8-cb251c8cf4d3/util/0.log" Jan 25 00:27:11 crc kubenswrapper[4985]: I0125 00:27:11.326072 4985 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs_04fd0691-1cb5-4e06-80e8-cb251c8cf4d3/pull/0.log" Jan 25 00:27:11 crc kubenswrapper[4985]: I0125 00:27:11.329054 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs_04fd0691-1cb5-4e06-80e8-cb251c8cf4d3/pull/0.log" Jan 25 00:27:11 crc kubenswrapper[4985]: I0125 00:27:11.491391 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs_04fd0691-1cb5-4e06-80e8-cb251c8cf4d3/util/0.log" Jan 25 00:27:11 crc kubenswrapper[4985]: I0125 00:27:11.492651 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs_04fd0691-1cb5-4e06-80e8-cb251c8cf4d3/pull/0.log" Jan 25 00:27:11 crc kubenswrapper[4985]: I0125 00:27:11.526837 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8ed862a309935d5a1c8012df79b93f7fb46e029d4689f7f6ddcb9e7f5e59qzs_04fd0691-1cb5-4e06-80e8-cb251c8cf4d3/extract/0.log" Jan 25 00:27:11 crc kubenswrapper[4985]: I0125 00:27:11.652532 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68_1e0b8567-c91d-4d4e-a083-470f089b611c/util/0.log" Jan 25 00:27:11 crc kubenswrapper[4985]: I0125 00:27:11.781556 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68_1e0b8567-c91d-4d4e-a083-470f089b611c/pull/0.log" Jan 25 00:27:11 crc kubenswrapper[4985]: I0125 00:27:11.787413 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68_1e0b8567-c91d-4d4e-a083-470f089b611c/util/0.log" Jan 25 00:27:11 crc kubenswrapper[4985]: I0125 00:27:11.792865 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68_1e0b8567-c91d-4d4e-a083-470f089b611c/pull/0.log" Jan 25 00:27:11 crc kubenswrapper[4985]: I0125 00:27:11.964081 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68_1e0b8567-c91d-4d4e-a083-470f089b611c/util/0.log" Jan 25 00:27:11 crc kubenswrapper[4985]: I0125 00:27:11.985420 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68_1e0b8567-c91d-4d4e-a083-470f089b611c/pull/0.log" Jan 25 00:27:12 crc kubenswrapper[4985]: I0125 00:27:12.056822 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_98629960b44b381d1a86cff1d1439a8df43509c9ad24579158c59d0f087ds68_1e0b8567-c91d-4d4e-a083-470f089b611c/extract/0.log" Jan 25 00:27:12 crc kubenswrapper[4985]: I0125 00:27:12.258489 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-xpplg_c83724a9-f8b9-4170-993e-55a4801d0b9b/extract-utilities/0.log" Jan 25 00:27:12 crc kubenswrapper[4985]: I0125 00:27:12.376718 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-xpplg_c83724a9-f8b9-4170-993e-55a4801d0b9b/extract-utilities/0.log" Jan 25 00:27:12 crc 
kubenswrapper[4985]: I0125 00:27:12.377006 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-xpplg_c83724a9-f8b9-4170-993e-55a4801d0b9b/extract-content/0.log" Jan 25 00:27:12 crc kubenswrapper[4985]: I0125 00:27:12.382765 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-xpplg_c83724a9-f8b9-4170-993e-55a4801d0b9b/extract-content/0.log" Jan 25 00:27:12 crc kubenswrapper[4985]: I0125 00:27:12.581929 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-xpplg_c83724a9-f8b9-4170-993e-55a4801d0b9b/extract-utilities/0.log" Jan 25 00:27:12 crc kubenswrapper[4985]: I0125 00:27:12.583475 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-xpplg_c83724a9-f8b9-4170-993e-55a4801d0b9b/extract-content/0.log" Jan 25 00:27:12 crc kubenswrapper[4985]: I0125 00:27:12.827855 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-r65ns_3f7c4664-a09a-4bf7-a974-06d53cd11f51/extract-utilities/0.log" Jan 25 00:27:12 crc kubenswrapper[4985]: I0125 00:27:12.978610 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-xpplg_c83724a9-f8b9-4170-993e-55a4801d0b9b/registry-server/0.log" Jan 25 00:27:12 crc kubenswrapper[4985]: I0125 00:27:12.979144 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-r65ns_3f7c4664-a09a-4bf7-a974-06d53cd11f51/extract-utilities/0.log" Jan 25 00:27:13 crc kubenswrapper[4985]: I0125 00:27:13.049929 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-r65ns_3f7c4664-a09a-4bf7-a974-06d53cd11f51/extract-content/0.log" Jan 25 00:27:13 crc kubenswrapper[4985]: I0125 00:27:13.052755 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-r65ns_3f7c4664-a09a-4bf7-a974-06d53cd11f51/extract-content/0.log" Jan 25 00:27:13 crc kubenswrapper[4985]: I0125 00:27:13.159648 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-r65ns_3f7c4664-a09a-4bf7-a974-06d53cd11f51/extract-utilities/0.log" Jan 25 00:27:13 crc kubenswrapper[4985]: I0125 00:27:13.192174 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-r65ns_3f7c4664-a09a-4bf7-a974-06d53cd11f51/extract-content/0.log" Jan 25 00:27:13 crc kubenswrapper[4985]: I0125 00:27:13.348425 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-rb5dx_6a7626ea-b9ae-40c0-a15d-26059903fb75/marketplace-operator/0.log" Jan 25 00:27:13 crc kubenswrapper[4985]: I0125 00:27:13.384834 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-d766v_2ba64434-86dd-4e4c-8586-b55306d5162e/extract-utilities/0.log" Jan 25 00:27:13 crc kubenswrapper[4985]: I0125 00:27:13.530428 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-d766v_2ba64434-86dd-4e4c-8586-b55306d5162e/extract-utilities/0.log" Jan 25 00:27:13 crc kubenswrapper[4985]: I0125 00:27:13.543604 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-d766v_2ba64434-86dd-4e4c-8586-b55306d5162e/extract-content/0.log" Jan 25 00:27:13 crc 
kubenswrapper[4985]: I0125 00:27:13.559599 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-d766v_2ba64434-86dd-4e4c-8586-b55306d5162e/extract-content/0.log" Jan 25 00:27:13 crc kubenswrapper[4985]: I0125 00:27:13.678377 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-d766v_2ba64434-86dd-4e4c-8586-b55306d5162e/extract-utilities/0.log" Jan 25 00:27:13 crc kubenswrapper[4985]: I0125 00:27:13.697467 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-d766v_2ba64434-86dd-4e4c-8586-b55306d5162e/extract-content/0.log" Jan 25 00:27:14 crc kubenswrapper[4985]: I0125 00:27:14.466550 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-r65ns_3f7c4664-a09a-4bf7-a974-06d53cd11f51/registry-server/0.log" Jan 25 00:27:15 crc kubenswrapper[4985]: I0125 00:27:15.769486 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-d766v_2ba64434-86dd-4e4c-8586-b55306d5162e/registry-server/0.log" Jan 25 00:27:26 crc kubenswrapper[4985]: I0125 00:27:26.265054 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-68bc856cb9-85xxr_16badb6b-d0b6-454e-a544-6811966984a6/prometheus-operator/0.log" Jan 25 00:27:26 crc kubenswrapper[4985]: I0125 00:27:26.272150 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-954995c76-q6cxx_ba5b943c-3070-4afc-b57c-fb0be23f2a32/prometheus-operator-admission-webhook/0.log" Jan 25 00:27:26 crc kubenswrapper[4985]: I0125 00:27:26.321245 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_obo-prometheus-operator-admission-webhook-954995c76-xnqrt_b1769099-2ab1-4b9b-b373-dc9e096c14d9/prometheus-operator-admission-webhook/0.log" Jan 25 00:27:26 crc kubenswrapper[4985]: I0125 00:27:26.438791 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_perses-operator-5bf474d74f-xv6dg_5221e9c8-1162-482b-8120-6dd97c481304/perses-operator/0.log" Jan 25 00:27:26 crc kubenswrapper[4985]: I0125 00:27:26.441281 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-operators_observability-operator-59bdc8b94-t7dvp_ded8ba3b-7925-430a-a595-93b3b00ae9b5/operator/0.log" Jan 25 00:27:35 crc kubenswrapper[4985]: I0125 00:27:35.835821 4985 patch_prober.go:28] interesting pod/machine-config-daemon-dddxc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 25 00:27:35 crc kubenswrapper[4985]: I0125 00:27:35.838397 4985 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 25 00:27:35 crc kubenswrapper[4985]: I0125 00:27:35.838648 4985 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" Jan 25 00:27:35 crc kubenswrapper[4985]: I0125 00:27:35.839909 4985 kuberuntime_manager.go:1027] "Message for Container of pod" 
containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6059e6ff089d7f5e5ee35f49c544101eb92dc9b9c16e25922cd65befc579043e"} pod="openshift-machine-config-operator/machine-config-daemon-dddxc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 25 00:27:35 crc kubenswrapper[4985]: I0125 00:27:35.840217 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" containerID="cri-o://6059e6ff089d7f5e5ee35f49c544101eb92dc9b9c16e25922cd65befc579043e" gracePeriod=600 Jan 25 00:27:36 crc kubenswrapper[4985]: I0125 00:27:36.808073 4985 generic.go:334] "Generic (PLEG): container finished" podID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerID="6059e6ff089d7f5e5ee35f49c544101eb92dc9b9c16e25922cd65befc579043e" exitCode=0 Jan 25 00:27:36 crc kubenswrapper[4985]: I0125 00:27:36.808156 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" event={"ID":"5fa83abe-5c61-40a5-bf77-d8f929bdda78","Type":"ContainerDied","Data":"6059e6ff089d7f5e5ee35f49c544101eb92dc9b9c16e25922cd65befc579043e"} Jan 25 00:27:36 crc kubenswrapper[4985]: I0125 00:27:36.808660 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" event={"ID":"5fa83abe-5c61-40a5-bf77-d8f929bdda78","Type":"ContainerStarted","Data":"4e63cb50ac1015bd99e74047dc2d1e701a48ee00744c287798c6f1ddb19f024d"} Jan 25 00:27:36 crc kubenswrapper[4985]: I0125 00:27:36.808687 4985 scope.go:117] "RemoveContainer" containerID="62dc661fb816fd2fd6357957822aa99e2fb982982064c42d0d36bc76cfe09d3d" Jan 25 00:28:17 crc kubenswrapper[4985]: I0125 00:28:17.165480 4985 generic.go:334] "Generic (PLEG): container finished" podID="caef0ee4-a055-4f06-ba24-6fff3ba1ac7b" containerID="214cb5f81f8246245a8c50be2fa5a238eae2c982f165310e62cebbdfc7f85dc7" exitCode=0 Jan 25 00:28:17 crc kubenswrapper[4985]: I0125 00:28:17.165618 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-blsh4/must-gather-2v9h6" event={"ID":"caef0ee4-a055-4f06-ba24-6fff3ba1ac7b","Type":"ContainerDied","Data":"214cb5f81f8246245a8c50be2fa5a238eae2c982f165310e62cebbdfc7f85dc7"} Jan 25 00:28:17 crc kubenswrapper[4985]: I0125 00:28:17.166675 4985 scope.go:117] "RemoveContainer" containerID="214cb5f81f8246245a8c50be2fa5a238eae2c982f165310e62cebbdfc7f85dc7" Jan 25 00:28:18 crc kubenswrapper[4985]: I0125 00:28:18.130771 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-blsh4_must-gather-2v9h6_caef0ee4-a055-4f06-ba24-6fff3ba1ac7b/gather/0.log" Jan 25 00:28:24 crc kubenswrapper[4985]: I0125 00:28:24.882883 4985 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-blsh4/must-gather-2v9h6"] Jan 25 00:28:24 crc kubenswrapper[4985]: I0125 00:28:24.883610 4985 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-blsh4/must-gather-2v9h6" podUID="caef0ee4-a055-4f06-ba24-6fff3ba1ac7b" containerName="copy" containerID="cri-o://f159cea8698d5bad23d243bb452357a5ad42ee8fb1c840a72347bf5d02df647e" gracePeriod=2 Jan 25 00:28:24 crc kubenswrapper[4985]: I0125 00:28:24.889527 4985 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-blsh4/must-gather-2v9h6"] Jan 25 00:28:25 crc kubenswrapper[4985]: I0125 00:28:25.227725 4985 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-blsh4_must-gather-2v9h6_caef0ee4-a055-4f06-ba24-6fff3ba1ac7b/copy/0.log" Jan 25 00:28:25 crc kubenswrapper[4985]: I0125 00:28:25.228306 4985 generic.go:334] "Generic (PLEG): container finished" podID="caef0ee4-a055-4f06-ba24-6fff3ba1ac7b" containerID="f159cea8698d5bad23d243bb452357a5ad42ee8fb1c840a72347bf5d02df647e" exitCode=143 Jan 25 00:28:25 crc kubenswrapper[4985]: I0125 00:28:25.228366 4985 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5cee2f7779a72f393df99e859cea63d1860ec02dd5e48cd507caecc18f24bcbf" Jan 25 00:28:25 crc kubenswrapper[4985]: I0125 00:28:25.241480 4985 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-blsh4_must-gather-2v9h6_caef0ee4-a055-4f06-ba24-6fff3ba1ac7b/copy/0.log" Jan 25 00:28:25 crc kubenswrapper[4985]: I0125 00:28:25.241906 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-blsh4/must-gather-2v9h6" Jan 25 00:28:25 crc kubenswrapper[4985]: I0125 00:28:25.414018 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qtjp9\" (UniqueName: \"kubernetes.io/projected/caef0ee4-a055-4f06-ba24-6fff3ba1ac7b-kube-api-access-qtjp9\") pod \"caef0ee4-a055-4f06-ba24-6fff3ba1ac7b\" (UID: \"caef0ee4-a055-4f06-ba24-6fff3ba1ac7b\") " Jan 25 00:28:25 crc kubenswrapper[4985]: I0125 00:28:25.414202 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/caef0ee4-a055-4f06-ba24-6fff3ba1ac7b-must-gather-output\") pod \"caef0ee4-a055-4f06-ba24-6fff3ba1ac7b\" (UID: \"caef0ee4-a055-4f06-ba24-6fff3ba1ac7b\") " Jan 25 00:28:25 crc kubenswrapper[4985]: I0125 00:28:25.420300 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/caef0ee4-a055-4f06-ba24-6fff3ba1ac7b-kube-api-access-qtjp9" (OuterVolumeSpecName: "kube-api-access-qtjp9") pod "caef0ee4-a055-4f06-ba24-6fff3ba1ac7b" (UID: "caef0ee4-a055-4f06-ba24-6fff3ba1ac7b"). InnerVolumeSpecName "kube-api-access-qtjp9". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:28:25 crc kubenswrapper[4985]: I0125 00:28:25.466776 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/caef0ee4-a055-4f06-ba24-6fff3ba1ac7b-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "caef0ee4-a055-4f06-ba24-6fff3ba1ac7b" (UID: "caef0ee4-a055-4f06-ba24-6fff3ba1ac7b"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 25 00:28:25 crc kubenswrapper[4985]: I0125 00:28:25.516202 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qtjp9\" (UniqueName: \"kubernetes.io/projected/caef0ee4-a055-4f06-ba24-6fff3ba1ac7b-kube-api-access-qtjp9\") on node \"crc\" DevicePath \"\"" Jan 25 00:28:25 crc kubenswrapper[4985]: I0125 00:28:25.516245 4985 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/caef0ee4-a055-4f06-ba24-6fff3ba1ac7b-must-gather-output\") on node \"crc\" DevicePath \"\"" Jan 25 00:28:26 crc kubenswrapper[4985]: I0125 00:28:26.238484 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-blsh4/must-gather-2v9h6" Jan 25 00:28:26 crc kubenswrapper[4985]: I0125 00:28:26.289167 4985 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="caef0ee4-a055-4f06-ba24-6fff3ba1ac7b" path="/var/lib/kubelet/pods/caef0ee4-a055-4f06-ba24-6fff3ba1ac7b/volumes" Jan 25 00:30:00 crc kubenswrapper[4985]: I0125 00:30:00.163884 4985 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29488350-w2n4r"] Jan 25 00:30:00 crc kubenswrapper[4985]: E0125 00:30:00.165166 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="caef0ee4-a055-4f06-ba24-6fff3ba1ac7b" containerName="copy" Jan 25 00:30:00 crc kubenswrapper[4985]: I0125 00:30:00.165199 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="caef0ee4-a055-4f06-ba24-6fff3ba1ac7b" containerName="copy" Jan 25 00:30:00 crc kubenswrapper[4985]: E0125 00:30:00.165272 4985 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="caef0ee4-a055-4f06-ba24-6fff3ba1ac7b" containerName="gather" Jan 25 00:30:00 crc kubenswrapper[4985]: I0125 00:30:00.165290 4985 state_mem.go:107] "Deleted CPUSet assignment" podUID="caef0ee4-a055-4f06-ba24-6fff3ba1ac7b" containerName="gather" Jan 25 00:30:00 crc kubenswrapper[4985]: I0125 00:30:00.165652 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="caef0ee4-a055-4f06-ba24-6fff3ba1ac7b" containerName="gather" Jan 25 00:30:00 crc kubenswrapper[4985]: I0125 00:30:00.165696 4985 memory_manager.go:354] "RemoveStaleState removing state" podUID="caef0ee4-a055-4f06-ba24-6fff3ba1ac7b" containerName="copy" Jan 25 00:30:00 crc kubenswrapper[4985]: I0125 00:30:00.167978 4985 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29488350-w2n4r" Jan 25 00:30:00 crc kubenswrapper[4985]: I0125 00:30:00.178742 4985 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 25 00:30:00 crc kubenswrapper[4985]: I0125 00:30:00.178978 4985 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 25 00:30:00 crc kubenswrapper[4985]: I0125 00:30:00.189693 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29488350-w2n4r"] Jan 25 00:30:00 crc kubenswrapper[4985]: I0125 00:30:00.322787 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8477ac51-35d6-4330-8796-cf72569fa341-secret-volume\") pod \"collect-profiles-29488350-w2n4r\" (UID: \"8477ac51-35d6-4330-8796-cf72569fa341\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488350-w2n4r" Jan 25 00:30:00 crc kubenswrapper[4985]: I0125 00:30:00.323145 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8477ac51-35d6-4330-8796-cf72569fa341-config-volume\") pod \"collect-profiles-29488350-w2n4r\" (UID: \"8477ac51-35d6-4330-8796-cf72569fa341\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488350-w2n4r" Jan 25 00:30:00 crc kubenswrapper[4985]: I0125 00:30:00.323377 4985 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2krt\" (UniqueName: 
\"kubernetes.io/projected/8477ac51-35d6-4330-8796-cf72569fa341-kube-api-access-c2krt\") pod \"collect-profiles-29488350-w2n4r\" (UID: \"8477ac51-35d6-4330-8796-cf72569fa341\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488350-w2n4r" Jan 25 00:30:00 crc kubenswrapper[4985]: I0125 00:30:00.424805 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8477ac51-35d6-4330-8796-cf72569fa341-secret-volume\") pod \"collect-profiles-29488350-w2n4r\" (UID: \"8477ac51-35d6-4330-8796-cf72569fa341\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488350-w2n4r" Jan 25 00:30:00 crc kubenswrapper[4985]: I0125 00:30:00.424857 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8477ac51-35d6-4330-8796-cf72569fa341-config-volume\") pod \"collect-profiles-29488350-w2n4r\" (UID: \"8477ac51-35d6-4330-8796-cf72569fa341\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488350-w2n4r" Jan 25 00:30:00 crc kubenswrapper[4985]: I0125 00:30:00.425051 4985 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2krt\" (UniqueName: \"kubernetes.io/projected/8477ac51-35d6-4330-8796-cf72569fa341-kube-api-access-c2krt\") pod \"collect-profiles-29488350-w2n4r\" (UID: \"8477ac51-35d6-4330-8796-cf72569fa341\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488350-w2n4r" Jan 25 00:30:00 crc kubenswrapper[4985]: I0125 00:30:00.425703 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8477ac51-35d6-4330-8796-cf72569fa341-config-volume\") pod \"collect-profiles-29488350-w2n4r\" (UID: \"8477ac51-35d6-4330-8796-cf72569fa341\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488350-w2n4r" Jan 25 00:30:00 crc kubenswrapper[4985]: I0125 00:30:00.435373 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8477ac51-35d6-4330-8796-cf72569fa341-secret-volume\") pod \"collect-profiles-29488350-w2n4r\" (UID: \"8477ac51-35d6-4330-8796-cf72569fa341\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488350-w2n4r" Jan 25 00:30:00 crc kubenswrapper[4985]: I0125 00:30:00.442215 4985 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2krt\" (UniqueName: \"kubernetes.io/projected/8477ac51-35d6-4330-8796-cf72569fa341-kube-api-access-c2krt\") pod \"collect-profiles-29488350-w2n4r\" (UID: \"8477ac51-35d6-4330-8796-cf72569fa341\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29488350-w2n4r" Jan 25 00:30:00 crc kubenswrapper[4985]: I0125 00:30:00.503572 4985 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29488350-w2n4r" Jan 25 00:30:00 crc kubenswrapper[4985]: I0125 00:30:00.736188 4985 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29488350-w2n4r"] Jan 25 00:30:01 crc kubenswrapper[4985]: I0125 00:30:01.034873 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29488350-w2n4r" event={"ID":"8477ac51-35d6-4330-8796-cf72569fa341","Type":"ContainerStarted","Data":"2ea13a1eb266b159ef5a05eda47f002c8db3f17ddbc0278d1c312c9a320fc251"} Jan 25 00:30:01 crc kubenswrapper[4985]: I0125 00:30:01.034918 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29488350-w2n4r" event={"ID":"8477ac51-35d6-4330-8796-cf72569fa341","Type":"ContainerStarted","Data":"9f7f32deb7fa0509493f9ea4763ecbf06ec4a908db2921d92f05990d15f30857"} Jan 25 00:30:01 crc kubenswrapper[4985]: I0125 00:30:01.049424 4985 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29488350-w2n4r" podStartSLOduration=1.049408222 podStartE2EDuration="1.049408222s" podCreationTimestamp="2026-01-25 00:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-25 00:30:01.047318927 +0000 UTC m=+1411.079255210" watchObservedRunningTime="2026-01-25 00:30:01.049408222 +0000 UTC m=+1411.081344495" Jan 25 00:30:02 crc kubenswrapper[4985]: I0125 00:30:02.045719 4985 generic.go:334] "Generic (PLEG): container finished" podID="8477ac51-35d6-4330-8796-cf72569fa341" containerID="2ea13a1eb266b159ef5a05eda47f002c8db3f17ddbc0278d1c312c9a320fc251" exitCode=0 Jan 25 00:30:02 crc kubenswrapper[4985]: I0125 00:30:02.045800 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29488350-w2n4r" event={"ID":"8477ac51-35d6-4330-8796-cf72569fa341","Type":"ContainerDied","Data":"2ea13a1eb266b159ef5a05eda47f002c8db3f17ddbc0278d1c312c9a320fc251"} Jan 25 00:30:03 crc kubenswrapper[4985]: I0125 00:30:03.316750 4985 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29488350-w2n4r" Jan 25 00:30:03 crc kubenswrapper[4985]: I0125 00:30:03.384184 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c2krt\" (UniqueName: \"kubernetes.io/projected/8477ac51-35d6-4330-8796-cf72569fa341-kube-api-access-c2krt\") pod \"8477ac51-35d6-4330-8796-cf72569fa341\" (UID: \"8477ac51-35d6-4330-8796-cf72569fa341\") " Jan 25 00:30:03 crc kubenswrapper[4985]: I0125 00:30:03.384489 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8477ac51-35d6-4330-8796-cf72569fa341-config-volume\") pod \"8477ac51-35d6-4330-8796-cf72569fa341\" (UID: \"8477ac51-35d6-4330-8796-cf72569fa341\") " Jan 25 00:30:03 crc kubenswrapper[4985]: I0125 00:30:03.384548 4985 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8477ac51-35d6-4330-8796-cf72569fa341-secret-volume\") pod \"8477ac51-35d6-4330-8796-cf72569fa341\" (UID: \"8477ac51-35d6-4330-8796-cf72569fa341\") " Jan 25 00:30:03 crc kubenswrapper[4985]: I0125 00:30:03.384955 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8477ac51-35d6-4330-8796-cf72569fa341-config-volume" (OuterVolumeSpecName: "config-volume") pod "8477ac51-35d6-4330-8796-cf72569fa341" (UID: "8477ac51-35d6-4330-8796-cf72569fa341"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 25 00:30:03 crc kubenswrapper[4985]: I0125 00:30:03.389615 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8477ac51-35d6-4330-8796-cf72569fa341-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8477ac51-35d6-4330-8796-cf72569fa341" (UID: "8477ac51-35d6-4330-8796-cf72569fa341"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 25 00:30:03 crc kubenswrapper[4985]: I0125 00:30:03.389778 4985 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8477ac51-35d6-4330-8796-cf72569fa341-kube-api-access-c2krt" (OuterVolumeSpecName: "kube-api-access-c2krt") pod "8477ac51-35d6-4330-8796-cf72569fa341" (UID: "8477ac51-35d6-4330-8796-cf72569fa341"). InnerVolumeSpecName "kube-api-access-c2krt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 25 00:30:03 crc kubenswrapper[4985]: I0125 00:30:03.486009 4985 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c2krt\" (UniqueName: \"kubernetes.io/projected/8477ac51-35d6-4330-8796-cf72569fa341-kube-api-access-c2krt\") on node \"crc\" DevicePath \"\"" Jan 25 00:30:03 crc kubenswrapper[4985]: I0125 00:30:03.486053 4985 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8477ac51-35d6-4330-8796-cf72569fa341-config-volume\") on node \"crc\" DevicePath \"\"" Jan 25 00:30:03 crc kubenswrapper[4985]: I0125 00:30:03.486062 4985 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8477ac51-35d6-4330-8796-cf72569fa341-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 25 00:30:04 crc kubenswrapper[4985]: I0125 00:30:04.062436 4985 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29488350-w2n4r" event={"ID":"8477ac51-35d6-4330-8796-cf72569fa341","Type":"ContainerDied","Data":"9f7f32deb7fa0509493f9ea4763ecbf06ec4a908db2921d92f05990d15f30857"} Jan 25 00:30:04 crc kubenswrapper[4985]: I0125 00:30:04.062489 4985 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9f7f32deb7fa0509493f9ea4763ecbf06ec4a908db2921d92f05990d15f30857" Jan 25 00:30:04 crc kubenswrapper[4985]: I0125 00:30:04.062540 4985 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29488350-w2n4r" Jan 25 00:30:05 crc kubenswrapper[4985]: I0125 00:30:05.836511 4985 patch_prober.go:28] interesting pod/machine-config-daemon-dddxc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 25 00:30:05 crc kubenswrapper[4985]: I0125 00:30:05.836579 4985 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 25 00:30:35 crc kubenswrapper[4985]: I0125 00:30:35.836472 4985 patch_prober.go:28] interesting pod/machine-config-daemon-dddxc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 25 00:30:35 crc kubenswrapper[4985]: I0125 00:30:35.837318 4985 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-dddxc" podUID="5fa83abe-5c61-40a5-bf77-d8f929bdda78" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515135262107024447 0ustar coreroot  Om77'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015135262110017356 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015135256707016520 5ustar 
corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015135256710015462 5ustar corecore